diff --git a/.bumpversion.cfg b/.bumpversion.cfg index e6cb651b4..b3d93c102 100644 --- a/.bumpversion.cfg +++ b/.bumpversion.cfg @@ -1,5 +1,5 @@ [bumpversion] -current_version = 0.8.1.dev0 +current_version = 0.8.2.dev0 commit = False tag = False allow_dirty = False diff --git a/.gitattributes b/.gitattributes index b2966416d..abcad4b6d 100644 --- a/.gitattributes +++ b/.gitattributes @@ -1 +1,4 @@ notebooks/*.ipynb -linguist-detectable +*.png filter=lfs diff=lfs merge=lfs -text +*.svg filter=lfs diff=lfs merge=lfs -text +logo.svg -filter=lfs -diff=lfs -merge=lfs text diff --git a/.github/actions/python/action.yml b/.github/actions/python/action.yml index ee9d66e3e..124b2140a 100644 --- a/.github/actions/python/action.yml +++ b/.github/actions/python/action.yml @@ -8,7 +8,7 @@ runs: using: "composite" steps: - name: Set up Python ${{ inputs.python_version }} - uses: actions/setup-python@v4 + uses: actions/setup-python@v5 with: python-version: ${{ inputs.python_version }} cache: 'pip' diff --git a/.github/workflows/main.yaml b/.github/workflows/main.yaml index be2bb60b7..38d905bee 100644 --- a/.github/workflows/main.yaml +++ b/.github/workflows/main.yaml @@ -24,12 +24,12 @@ jobs: name: Lint code and check type hints runs-on: ubuntu-latest steps: - - uses: actions/checkout@v3 + - uses: actions/checkout@v4 - name: Setup Python 3.8 uses: ./.github/actions/python with: python_version: 3.8 - - uses: actions/cache@v3 + - uses: actions/cache@v4 with: path: ~/.cache/pre-commit key: pre-commit-${{ env.pythonLocation }}-${{ hashFiles('.pre-commit-config.yaml') }} @@ -43,19 +43,21 @@ jobs: run: | MYPY_VERSION=$(mypy --version | sed 's/[^0-9.]*\([0-9.]*\).*/\1/') echo "key=mypy-$MYPY_VERSION-${{ env.pythonLocation }}" >> $GITHUB_OUTPUT - - uses: actions/cache@v3 + - uses: actions/cache@v4 with: path: .mypy_cache key: ${{ steps.generate-mypy-cache-key.outputs.key }} - name: Check Type Hints run: mypy src/ + docs: name: Build Docs runs-on: ubuntu-latest steps: - - uses: 
actions/checkout@v3 + - uses: actions/checkout@v4 with: fetch-depth: 0 + lfs: true - name: Setup Python 3.8 uses: ./.github/actions/python with: @@ -66,11 +68,12 @@ jobs: pandoc-version: ${{ env.PANDOC_VERSION }} - name: Build Docs run: mkdocs build + group-tests: strategy: fail-fast: false matrix: - python_version: ["3.8", "3.9", "3.10"] + python_version: ["3.8", "3.9", "3.10", "3.11"] group_number: [1, 2, 3, 4] name: Run Tests - Python ${{ matrix.python_version }} - Group ${{ matrix.group_number }} uses: ./.github/workflows/run-tests-workflow.yaml @@ -79,15 +82,17 @@ jobs: group_number: ${{ matrix.group_number }} python_version: ${{ matrix.python_version }} needs: [code-quality] + notebook-tests: strategy: matrix: - python_version: ["3.8", "3.9", "3.10"] + python_version: ["3.8", "3.9", "3.10", "3.11"] name: Run Notebook tests - Python ${{ matrix.python_version }} uses: ./.github/workflows/run-notebook-tests-workflow.yaml with: python_version: ${{ matrix.python_version }} needs: [code-quality] + push-docs-and-release-testpypi: name: Push Docs and maybe release Package to TestPyPI runs-on: ubuntu-latest @@ -96,7 +101,10 @@ jobs: concurrency: group: publish steps: - - uses: actions/checkout@v3 + - uses: actions/checkout@v4 + with: + fetch-depth: 0 + lfs: true - name: Setup Python 3.8 uses: ./.github/actions/python with: diff --git a/.github/workflows/publish.yaml b/.github/workflows/publish.yaml index bd15a7663..70beebd20 100644 --- a/.github/workflows/publish.yaml +++ b/.github/workflows/publish.yaml @@ -1,4 +1,4 @@ -name: Publish Python Package to PyPI +name: Publish package and docs on: release: @@ -28,12 +28,17 @@ jobs: steps: - name: Checking out last commit in release if: ${{ github.event_name != 'workflow_dispatch' }} - uses: actions/checkout@v3 + uses: actions/checkout@v4 + with: + lfs: true + fetch-depth: 0 - name: Checking out last commit for tag ${{ inputs.tag_name }} - uses: actions/checkout@v3 if: ${{ github.event_name == 'workflow_dispatch' }} + uses: 
actions/checkout@v4 with: ref: ${{ inputs.tag_name }} + lfs: true + fetch-depth: 0 - name: Fail if running locally if: ${{ env.ACT }} # skip during local actions testing run: | diff --git a/.github/workflows/run-notebook-tests-workflow.yaml b/.github/workflows/run-notebook-tests-workflow.yaml index 639912549..853cec4aa 100644 --- a/.github/workflows/run-notebook-tests-workflow.yaml +++ b/.github/workflows/run-notebook-tests-workflow.yaml @@ -15,7 +15,7 @@ jobs: run-tests: runs-on: ubuntu-latest steps: - - uses: actions/checkout@v3 + - uses: actions/checkout@v4 with: fetch-depth: 0 - name: Setup Python ${{ inputs.python_version }} @@ -23,7 +23,7 @@ jobs: with: python_version: ${{ inputs.python_version }} - name: Cache Tox Directory for Tests - uses: actions/cache@v3 + uses: actions/cache@v4 with: key: tox-${{ runner.os }}-${{ github.ref }}-${{ hashFiles('tox.ini', 'requirements.txt') }}-${{ inputs.python_version }} path: .tox diff --git a/.github/workflows/run-tests-workflow.yaml b/.github/workflows/run-tests-workflow.yaml index 7904bb811..1ddc8b522 100644 --- a/.github/workflows/run-tests-workflow.yaml +++ b/.github/workflows/run-tests-workflow.yaml @@ -16,6 +16,7 @@ on: type: string required: true + env: PY_COLORS: 1 @@ -23,7 +24,12 @@ jobs: run-tests: runs-on: ubuntu-latest steps: - - uses: actions/checkout@v3 + - name: Free Disk Space (Ubuntu) + uses: jlumbroso/free-disk-space@main + with: + large-packages: false + docker-images: false + - uses: actions/checkout@v4 with: fetch-depth: 0 - name: Setup Python ${{ inputs.python_version }} @@ -31,7 +37,7 @@ jobs: with: python_version: ${{ inputs.python_version }} - name: Cache Tox Directory for Tests - uses: actions/cache@v3 + uses: actions/cache@v4 with: key: tox-${{ runner.os }}-${{ github.ref }}-${{ hashFiles('tox.ini', 'requirements.txt') }}-${{ inputs.python_version }} path: .tox @@ -40,7 +46,7 @@ jobs: - name: Test Group ${{ inputs.group_number }} run: tox -e tests -- --slow-tests --splits ${{ inputs.split_size 
}} --group ${{ inputs.group_number }} - name: Upload coverage reports to Codecov - uses: codecov/codecov-action@v3 + uses: codecov/codecov-action@v4 with: token: ${{ secrets.CODECOV_TOKEN }} files: ./coverage.xml diff --git a/.github/workflows/stale.yaml b/.github/workflows/stale.yaml index 8244dbacb..ade8940b2 100644 --- a/.github/workflows/stale.yaml +++ b/.github/workflows/stale.yaml @@ -7,7 +7,7 @@ jobs: stale: runs-on: ubuntu-latest steps: - - uses: actions/stale@v6 + - uses: actions/stale@v9 with: only-labels: 'awaiting-reply' days-before-stale: 30 diff --git a/.test_durations b/.test_durations index bf283f1a9..78f10ff83 100644 --- a/.test_durations +++ b/.test_durations @@ -1,4 +1,74 @@ { + "tests/influence/test_influence_calculator.py::test_dask_ekfac_influence[conv1d_nn_pert]": 1.1757412470178679, + "tests/influence/test_influence_calculator.py::test_dask_ekfac_influence[conv1d_nn_up]": 1.1531428449961822, + "tests/influence/test_influence_calculator.py::test_dask_ekfac_influence[conv1d_no_grad_up]": 0.40105982599197887, + "tests/influence/test_influence_calculator.py::test_dask_ekfac_influence[conv2d_nn_pert]": 0.49755765599547885, + "tests/influence/test_influence_calculator.py::test_dask_ekfac_influence[conv2d_nn_up]": 0.5384459960041568, + "tests/influence/test_influence_calculator.py::test_dask_ekfac_influence[conv3d_nn_pert]": 0.5693707920436282, + "tests/influence/test_influence_calculator.py::test_dask_ekfac_influence[conv3d_nn_up]": 0.5791179630032275, + "tests/influence/test_influence_calculator.py::test_dask_ekfac_influence[simple_nn_class_up]": 1.515325879008742, + "tests/influence/test_influence_calculator.py::test_dask_ekfac_influence[simple_nn_pert]": 0.31648814197978936, + "tests/influence/test_influence_calculator.py::test_dask_ekfac_influence[simple_nn_up]": 0.345498406997649, + "tests/influence/test_influence_calculator.py::test_dask_influence_factors[conv1d_nn_pert-arnoldi]": 0.7486136490188073, + 
"tests/influence/test_influence_calculator.py::test_dask_influence_factors[conv1d_nn_pert-cg]": 2.3709864970005583, + "tests/influence/test_influence_calculator.py::test_dask_influence_factors[conv1d_nn_pert-direct]": 0.6317156740115024, + "tests/influence/test_influence_calculator.py::test_dask_influence_factors[conv1d_nn_up-arnoldi]": 0.6990832140145358, + "tests/influence/test_influence_calculator.py::test_dask_influence_factors[conv1d_nn_up-cg]": 2.5801909349975176, + "tests/influence/test_influence_calculator.py::test_dask_influence_factors[conv1d_nn_up-direct]": 0.6321751869982108, + "tests/influence/test_influence_calculator.py::test_dask_influence_factors[conv1d_no_grad_up-arnoldi]": 0.3029264329816215, + "tests/influence/test_influence_calculator.py::test_dask_influence_factors[conv1d_no_grad_up-cg]": 1.2931301009957679, + "tests/influence/test_influence_calculator.py::test_dask_influence_factors[conv1d_no_grad_up-direct]": 0.2809486609767191, + "tests/influence/test_influence_calculator.py::test_dask_influence_factors[conv2d_nn_pert-arnoldi]": 0.5121201520087197, + "tests/influence/test_influence_calculator.py::test_dask_influence_factors[conv2d_nn_pert-cg]": 1.8714381649915595, + "tests/influence/test_influence_calculator.py::test_dask_influence_factors[conv2d_nn_pert-direct]": 0.3564423870120663, + "tests/influence/test_influence_calculator.py::test_dask_influence_factors[conv2d_nn_up-arnoldi]": 0.4797562639869284, + "tests/influence/test_influence_calculator.py::test_dask_influence_factors[conv2d_nn_up-cg]": 1.9824987890315242, + "tests/influence/test_influence_calculator.py::test_dask_influence_factors[conv2d_nn_up-direct]": 0.34359909396152943, + "tests/influence/test_influence_calculator.py::test_dask_influence_factors[conv3d_nn_pert-arnoldi]": 0.5911412620043848, + "tests/influence/test_influence_calculator.py::test_dask_influence_factors[conv3d_nn_pert-cg]": 1.8817940490262117, + 
"tests/influence/test_influence_calculator.py::test_dask_influence_factors[conv3d_nn_pert-direct]": 0.40603021200513467, + "tests/influence/test_influence_calculator.py::test_dask_influence_factors[conv3d_nn_up-arnoldi]": 0.5107649029814638, + "tests/influence/test_influence_calculator.py::test_dask_influence_factors[conv3d_nn_up-cg]": 3.2007382639567368, + "tests/influence/test_influence_calculator.py::test_dask_influence_factors[conv3d_nn_up-direct]": 0.43204710801364854, + "tests/influence/test_influence_calculator.py::test_dask_influence_factors[simple_nn_class_up-arnoldi]": 1.2488551850256044, + "tests/influence/test_influence_calculator.py::test_dask_influence_factors[simple_nn_class_up-cg]": 16.231199133966584, + "tests/influence/test_influence_calculator.py::test_dask_influence_factors[simple_nn_class_up-direct]": 0.8788644630112685, + "tests/influence/test_influence_calculator.py::test_dask_influence_factors[simple_nn_pert-arnoldi]": 0.3679435200174339, + "tests/influence/test_influence_calculator.py::test_dask_influence_factors[simple_nn_pert-cg]": 1.6276155139494222, + "tests/influence/test_influence_calculator.py::test_dask_influence_factors[simple_nn_pert-direct]": 0.3059443189704325, + "tests/influence/test_influence_calculator.py::test_dask_influence_factors[simple_nn_up-arnoldi]": 0.29977179697016254, + "tests/influence/test_influence_calculator.py::test_dask_influence_factors[simple_nn_up-cg]": 1.5808336699556094, + "tests/influence/test_influence_calculator.py::test_dask_influence_factors[simple_nn_up-direct]": 0.2595673779724166, + "tests/influence/test_influence_calculator.py::test_dask_influence_nn[conv1d_nn_pert]": 1.648073843010934, + "tests/influence/test_influence_calculator.py::test_dask_influence_nn[conv1d_nn_up]": 2.2320262680004817, + "tests/influence/test_influence_calculator.py::test_dask_influence_nn[conv1d_no_grad_up]": 0.739165420003701, + "tests/influence/test_influence_calculator.py::test_dask_influence_nn[conv2d_nn_pert]": 
1.3300797659903765, + "tests/influence/test_influence_calculator.py::test_dask_influence_nn[conv2d_nn_up]": 1.7796139150159433, + "tests/influence/test_influence_calculator.py::test_dask_influence_nn[conv3d_nn_pert]": 1.5524548839603085, + "tests/influence/test_influence_calculator.py::test_dask_influence_nn[conv3d_nn_up]": 15.148636500030989, + "tests/influence/test_influence_calculator.py::test_dask_influence_nn[simple_nn_class_up]": 2.0765505130111706, + "tests/influence/test_influence_calculator.py::test_dask_influence_nn[simple_nn_pert]": 0.7969291630142834, + "tests/influence/test_influence_calculator.py::test_dask_influence_nn[simple_nn_up]": 0.7292251750186551, + "tests/influence/test_influence_calculator.py::test_sequential_calculator[conv1d_nn_pert]": 1.2291264449886512, + "tests/influence/test_influence_calculator.py::test_sequential_calculator[conv1d_nn_up]": 1.24865967099322, + "tests/influence/test_influence_calculator.py::test_sequential_calculator[conv1d_no_grad_up]": 0.5143154310062528, + "tests/influence/test_influence_calculator.py::test_sequential_calculator[conv2d_nn_pert]": 0.7841348430083599, + "tests/influence/test_influence_calculator.py::test_sequential_calculator[conv2d_nn_up]": 0.8018321000272408, + "tests/influence/test_influence_calculator.py::test_sequential_calculator[conv3d_nn_pert]": 0.8897617959883064, + "tests/influence/test_influence_calculator.py::test_sequential_calculator[conv3d_nn_up]": 0.7785523339698557, + "tests/influence/test_influence_calculator.py::test_sequential_calculator[simple_nn_class_up]": 2.6527095659985207, + "tests/influence/test_influence_calculator.py::test_sequential_calculator[simple_nn_pert]": 0.5539844660379458, + "tests/influence/test_influence_calculator.py::test_sequential_calculator[simple_nn_up]": 0.5878762130159885, + "tests/influence/test_influence_calculator.py::test_thread_safety_violation_error[conv1d_nn_pert]": 0.9878947690012865, + 
"tests/influence/test_influence_calculator.py::test_thread_safety_violation_error[conv1d_nn_up]": 0.9868685769906733, + "tests/influence/test_influence_calculator.py::test_thread_safety_violation_error[conv1d_no_grad_up]": 0.31182356498902664, + "tests/influence/test_influence_calculator.py::test_thread_safety_violation_error[conv2d_nn_pert]": 0.38766848100931384, + "tests/influence/test_influence_calculator.py::test_thread_safety_violation_error[conv2d_nn_up]": 0.4189586569846142, + "tests/influence/test_influence_calculator.py::test_thread_safety_violation_error[conv3d_nn_pert]": 0.5046580989728682, + "tests/influence/test_influence_calculator.py::test_thread_safety_violation_error[conv3d_nn_up]": 2.282171113009099, + "tests/influence/test_influence_calculator.py::test_thread_safety_violation_error[simple_nn_class_up]": 1.2807428169762716, + "tests/influence/test_influence_calculator.py::test_thread_safety_violation_error[simple_nn_pert]": 0.2861086849879939, + "tests/influence/test_influence_calculator.py::test_thread_safety_violation_error[simple_nn_up]": 0.3151668659411371, "tests/influence/test_influences.py::test_influence_linear_model[cg-train_set_size_200-perturbation]": 0.8664472580130678, "tests/influence/test_influences.py::test_influence_linear_model[cg-train_set_size_200-up]": 0.18988716599415056, "tests/influence/test_influences.py::test_influence_linear_model[direct-train_set_size_200-perturbation]": 0.66577532098745, @@ -78,61 +148,349 @@ "tests/influence/test_util.py::test_lanzcos_low_rank_hessian_approx[model_data3-8-160-1e-05]": 4.422049004002474, "tests/influence/test_util.py::test_lanzcos_low_rank_hessian_approx[model_data4-4-250-1e-05]": 9.08382142597111, "tests/influence/test_util.py::test_lanzcos_low_rank_hessian_approx_exception": 0.0035210640053264797, - "tests/test_plugin.py::test_failure": 0.001304317032918334, - "tests/test_plugin.py::test_fixture_call_no_arguments": 0.0014436830242630094, - 
"tests/test_plugin.py::test_fixture_only[1]": 0.0011941569682676345, - "tests/test_plugin.py::test_fixture_only[2]": 0.0013037140015512705, - "tests/test_plugin.py::test_marker_and_fixture[1]": 0.0011783259978983551, - "tests/test_plugin.py::test_marker_and_fixture[2]": 0.001276884024264291, - "tests/test_plugin.py::test_marker_ignore_exception[0]": 0.0011224850022699684, - "tests/test_plugin.py::test_marker_ignore_exception[1]": 0.0009688139834906906, - "tests/test_plugin.py::test_marker_ignore_exception[2]": 0.0011277040175627917, - "tests/test_plugin.py::test_marker_ignore_exception[3]": 0.001226628024596721, - "tests/test_plugin.py::test_marker_ignore_exception[4]": 0.0010670160118024796, - "tests/test_plugin.py::test_marker_only[0]": 0.0027732179732993245, - "tests/test_plugin.py::test_marker_only_with_data_fixture[0]": 0.0012184199877083302, - "tests/test_plugin.py::test_marker_only_with_data_fixture[1]": 0.0014672029938083142, + "tests/influence/torch/test_functional.py::test_get_hessian[model_data0-4-avg]": 0.09813399301492609, + "tests/influence/torch/test_functional.py::test_get_hessian[model_data0-4-full]": 0.043379645998356864, + "tests/influence/torch/test_functional.py::test_get_hessian[model_data1-5-avg]": 0.3009787189948838, + "tests/influence/torch/test_functional.py::test_get_hessian[model_data1-5-full]": 0.09857722400920466, + "tests/influence/torch/test_functional.py::test_get_hessian[model_data2-10-avg]": 0.06352706399047747, + "tests/influence/torch/test_functional.py::test_get_hessian[model_data2-10-full]": 0.02785925197531469, + "tests/influence/torch/test_functional.py::test_get_hessian[model_data3-8-avg]": 0.12275354197481647, + "tests/influence/torch/test_functional.py::test_get_hessian[model_data3-8-full]": 0.038030802010325715, + "tests/influence/torch/test_functional.py::test_get_hessian[model_data4-4-avg]": 1.568861082982039, + "tests/influence/torch/test_functional.py::test_get_hessian[model_data4-4-full]": 0.3527732029906474, + 
"tests/influence/torch/test_functional.py::test_get_hvp_function[model_data0-4-avg-no_precomputed_grad]": 0.029053338977973908, + "tests/influence/torch/test_functional.py::test_get_hvp_function[model_data0-4-avg-precomputed_grad]": 0.024562739999964833, + "tests/influence/torch/test_functional.py::test_get_hvp_function[model_data0-4-full-no_precomputed_grad]": 0.02318769003613852, + "tests/influence/torch/test_functional.py::test_get_hvp_function[model_data0-4-full-precomputed_grad]": 0.02072873100405559, + "tests/influence/torch/test_functional.py::test_get_hvp_function[model_data1-5-avg-no_precomputed_grad]": 0.10399829200468957, + "tests/influence/torch/test_functional.py::test_get_hvp_function[model_data1-5-avg-precomputed_grad]": 0.08527540299110115, + "tests/influence/torch/test_functional.py::test_get_hvp_function[model_data1-5-full-no_precomputed_grad]": 0.0487626249960158, + "tests/influence/torch/test_functional.py::test_get_hvp_function[model_data1-5-full-precomputed_grad]": 0.06472014603787102, + "tests/influence/torch/test_functional.py::test_get_hvp_function[model_data2-10-avg-no_precomputed_grad]": 0.021364449989050627, + "tests/influence/torch/test_functional.py::test_get_hvp_function[model_data2-10-avg-precomputed_grad]": 0.020947107987012714, + "tests/influence/torch/test_functional.py::test_get_hvp_function[model_data2-10-full-no_precomputed_grad]": 0.01564259297447279, + "tests/influence/torch/test_functional.py::test_get_hvp_function[model_data2-10-full-precomputed_grad]": 0.018716127960942686, + "tests/influence/torch/test_functional.py::test_get_hvp_function[model_data3-8-avg-no_precomputed_grad]": 0.030198641994502395, + "tests/influence/torch/test_functional.py::test_get_hvp_function[model_data3-8-avg-precomputed_grad]": 0.02652753097936511, + "tests/influence/torch/test_functional.py::test_get_hvp_function[model_data3-8-full-no_precomputed_grad]": 0.02417866201722063, + 
"tests/influence/torch/test_functional.py::test_get_hvp_function[model_data3-8-full-precomputed_grad]": 0.020271165965823457, + "tests/influence/torch/test_functional.py::test_get_hvp_function[model_data4-4-avg-no_precomputed_grad]": 0.295251544972416, + "tests/influence/torch/test_functional.py::test_get_hvp_function[model_data4-4-avg-precomputed_grad]": 0.24220332800177857, + "tests/influence/torch/test_functional.py::test_get_hvp_function[model_data4-4-full-no_precomputed_grad]": 0.15127054302138276, + "tests/influence/torch/test_functional.py::test_get_hvp_function[model_data4-4-full-precomputed_grad]": 0.12759025098057464, + "tests/influence/torch/test_functional.py::test_hvp[model_data0-1e-05]": 0.01550043502356857, + "tests/influence/torch/test_functional.py::test_hvp[model_data1-1e-05]": 0.03742475199396722, + "tests/influence/torch/test_functional.py::test_hvp[model_data2-1e-05]": 0.008176781033398584, + "tests/influence/torch/test_functional.py::test_hvp[model_data3-1e-05]": 0.010425448999740183, + "tests/influence/torch/test_functional.py::test_hvp[model_data4-1e-05]": 0.015775886015035212, + "tests/influence/torch/test_functional.py::test_matrix_jacobian_product[100-5-110]": 0.008168531028786674, + "tests/influence/torch/test_functional.py::test_matrix_jacobian_product[25-10-500]": 0.013726653007324785, + "tests/influence/torch/test_functional.py::test_matrix_jacobian_product[46-1-632]": 0.008376071986276656, + "tests/influence/torch/test_functional.py::test_matrix_jacobian_product[50-3-120]": 0.0070768119767308235, + "tests/influence/torch/test_functional.py::test_mixed_derivatives[100-5-512]": 2.475869121000869, + "tests/influence/torch/test_functional.py::test_mixed_derivatives[25-10-734]": 0.19724710599984974, + "tests/influence/torch/test_functional.py::test_mixed_derivatives[46-1-1000]": 0.21653579399571754, + "tests/influence/torch/test_functional.py::test_mixed_derivatives[50-3-100]": 0.038392401998862624, + 
"tests/influence/torch/test_functional.py::test_per_sample_gradient[100-5-120]": 0.008947794995037839, + "tests/influence/torch/test_functional.py::test_per_sample_gradient[25-10-550]": 0.01637960397056304, + "tests/influence/torch/test_functional.py::test_per_sample_gradient[46-6-632]": 0.016884273994946852, + "tests/influence/torch/test_functional.py::test_per_sample_gradient[50-3-120]": 0.007916716014733538, + "tests/influence/torch/test_functional.py::test_randomized_nystroem_approximation[10-5]": 0.005715728009818122, + "tests/influence/torch/test_functional.py::test_randomized_nystroem_approximation[2-1]": 0.023494494991609827, + "tests/influence/torch/test_functional.py::test_randomized_nystroem_approximation[20-20]": 0.009906815976137295, + "tests/influence/torch/test_influence_model.py::test_influence_linear_model[cg-train_set_size_200-perturbation]": 20.91743779098033, + "tests/influence/torch/test_influence_model.py::test_influence_linear_model[cg-train_set_size_200-up]": 21.55937509299838, + "tests/influence/torch/test_influence_model.py::test_influence_linear_model[direct-train_set_size_200-perturbation]": 0.31099940900458023, + "tests/influence/torch/test_influence_model.py::test_influence_linear_model[direct-train_set_size_200-up]": 0.15369610002380796, + "tests/influence/torch/test_influence_model.py::test_influence_linear_model[lissa-train_set_size_200-perturbation]": 34.23643598801573, + "tests/influence/torch/test_influence_model.py::test_influence_linear_model[lissa-train_set_size_200-up]": 30.393810068024322, + "tests/influence/torch/test_influence_model.py::test_influences_arnoldi[conv1d_nn_pert]": 2.6213616719978745, + "tests/influence/torch/test_influence_model.py::test_influences_arnoldi[conv1d_nn_up]": 2.9271264809995046, + "tests/influence/torch/test_influence_model.py::test_influences_arnoldi[conv1d_no_grad_up]": 1.1280039110006328, + "tests/influence/torch/test_influence_model.py::test_influences_arnoldi[conv2d_nn_pert]": 
16.078887900001064, + "tests/influence/torch/test_influence_model.py::test_influences_arnoldi[conv2d_nn_up]": 16.092805495001812, + "tests/influence/torch/test_influence_model.py::test_influences_arnoldi[conv3d_nn_pert]": 5.826150597002197, + "tests/influence/torch/test_influence_model.py::test_influences_arnoldi[conv3d_nn_up]": 5.808433192996745, + "tests/influence/torch/test_influence_model.py::test_influences_arnoldi[simple_nn_class_up]": 3.4398634410008526, + "tests/influence/torch/test_influence_model.py::test_influences_arnoldi[simple_nn_pert]": 1.783800326000346, + "tests/influence/torch/test_influence_model.py::test_influences_arnoldi[simple_nn_up]": 1.5235134640006436, + "tests/influence/torch/test_influence_model.py::test_influences_ekfac[conv1d_nn_pert]": 0.9141263900091872, + "tests/influence/torch/test_influence_model.py::test_influences_ekfac[conv1d_nn_up]": 1.004119770048419, + "tests/influence/torch/test_influence_model.py::test_influences_ekfac[conv1d_no_grad_up]": 0.38047996698878706, + "tests/influence/torch/test_influence_model.py::test_influences_ekfac[conv2d_nn_pert]": 0.48015616298653185, + "tests/influence/torch/test_influence_model.py::test_influences_ekfac[conv2d_nn_up]": 0.5073693199956324, + "tests/influence/torch/test_influence_model.py::test_influences_ekfac[conv3d_nn_pert]": 0.6013624930055812, + "tests/influence/torch/test_influence_model.py::test_influences_ekfac[conv3d_nn_up]": 0.49367734795669094, + "tests/influence/torch/test_influence_model.py::test_influences_ekfac[simple_nn_class_up]": 1.3860138560121413, + "tests/influence/torch/test_influence_model.py::test_influences_ekfac[simple_nn_pert]": 0.3072592499956954, + "tests/influence/torch/test_influence_model.py::test_influences_ekfac[simple_nn_up]": 0.3164721610082779, + "tests/influence/torch/test_influence_model.py::test_influences_low_rank[conv1d_nn_pert-arnoldi]": 1.0992808279988822, + 
"tests/influence/torch/test_influence_model.py::test_influences_low_rank[conv1d_nn_pert-nystroem]": 1.040673425042769, + "tests/influence/torch/test_influence_model.py::test_influences_low_rank[conv1d_nn_up-arnoldi]": 1.1172903449914884, + "tests/influence/torch/test_influence_model.py::test_influences_low_rank[conv1d_nn_up-nystroem]": 1.036920167010976, + "tests/influence/torch/test_influence_model.py::test_influences_low_rank[conv1d_no_grad_up-arnoldi]": 0.44857565098209307, + "tests/influence/torch/test_influence_model.py::test_influences_low_rank[conv1d_no_grad_up-nystroem]": 0.3657014589989558, + "tests/influence/torch/test_influence_model.py::test_influences_low_rank[conv2d_nn_pert-arnoldi]": 10.615373000007821, + "tests/influence/torch/test_influence_model.py::test_influences_low_rank[conv2d_nn_pert-nystroem]": 0.6304801060177851, + "tests/influence/torch/test_influence_model.py::test_influences_low_rank[conv2d_nn_up-arnoldi]": 12.847174925991567, + "tests/influence/torch/test_influence_model.py::test_influences_low_rank[conv2d_nn_up-nystroem]": 0.6427060620044358, + "tests/influence/torch/test_influence_model.py::test_influences_low_rank[conv3d_nn_pert-arnoldi]": 3.3492665359808598, + "tests/influence/torch/test_influence_model.py::test_influences_low_rank[conv3d_nn_pert-nystroem]": 0.7446247620100621, + "tests/influence/torch/test_influence_model.py::test_influences_low_rank[conv3d_nn_up-arnoldi]": 1.3008154160052072, + "tests/influence/torch/test_influence_model.py::test_influences_low_rank[conv3d_nn_up-nystroem]": 0.659043225023197, + "tests/influence/torch/test_influence_model.py::test_influences_low_rank[simple_nn_class_up-arnoldi]": 1.4185076340218075, + "tests/influence/torch/test_influence_model.py::test_influences_low_rank[simple_nn_class_up-nystroem]": 1.341759205010021, + "tests/influence/torch/test_influence_model.py::test_influences_low_rank[simple_nn_pert-arnoldi]": 1.5481110329856165, + 
"tests/influence/torch/test_influence_model.py::test_influences_low_rank[simple_nn_pert-nystroem]": 0.7679775689903181, + "tests/influence/torch/test_influence_model.py::test_influences_low_rank[simple_nn_up-arnoldi]": 0.6033870960236527, + "tests/influence/torch/test_influence_model.py::test_influences_low_rank[simple_nn_up-nystroem]": 0.4260945589921903, + "tests/influence/torch/test_influence_model.py::test_influences_nn[conv1d_nn_pert-cg]": 1.2415003680216614, + "tests/influence/torch/test_influence_model.py::test_influences_nn[conv1d_nn_pert-lissa]": 1.446426609007176, + "tests/influence/torch/test_influence_model.py::test_influences_nn[conv1d_nn_up-cg]": 1.3278332729823887, + "tests/influence/torch/test_influence_model.py::test_influences_nn[conv1d_nn_up-lissa]": 1.458945247984957, + "tests/influence/torch/test_influence_model.py::test_influences_nn[conv1d_no_grad_up-cg]": 0.48686093898140825, + "tests/influence/torch/test_influence_model.py::test_influences_nn[conv1d_no_grad_up-lissa]": 0.5758301080204546, + "tests/influence/torch/test_influence_model.py::test_influences_nn[conv2d_nn_pert-cg]": 0.7778827689762693, + "tests/influence/torch/test_influence_model.py::test_influences_nn[conv2d_nn_pert-lissa]": 0.9706131120037753, + "tests/influence/torch/test_influence_model.py::test_influences_nn[conv2d_nn_up-cg]": 0.820894897042308, + "tests/influence/torch/test_influence_model.py::test_influences_nn[conv2d_nn_up-lissa]": 1.0038924609834794, + "tests/influence/torch/test_influence_model.py::test_influences_nn[conv3d_nn_pert-cg]": 0.8033803289872594, + "tests/influence/torch/test_influence_model.py::test_influences_nn[conv3d_nn_pert-lissa]": 1.055458217015257, + "tests/influence/torch/test_influence_model.py::test_influences_nn[conv3d_nn_up-cg]": 0.8625257119711023, + "tests/influence/torch/test_influence_model.py::test_influences_nn[conv3d_nn_up-lissa]": 1.0259976829984225, + 
"tests/influence/torch/test_influence_model.py::test_influences_nn[simple_nn_class_up-cg]": 3.199327295005787, + "tests/influence/torch/test_influence_model.py::test_influences_nn[simple_nn_class_up-lissa]": 1.6534321949875448, + "tests/influence/torch/test_influence_model.py::test_influences_nn[simple_nn_pert-cg]": 0.49723594496026635, + "tests/influence/torch/test_influence_model.py::test_influences_nn[simple_nn_pert-lissa]": 0.8214586230169516, + "tests/influence/torch/test_influence_model.py::test_influences_nn[simple_nn_up-cg]": 0.5237956949567888, + "tests/influence/torch/test_influence_model.py::test_influences_nn[simple_nn_up-lissa]": 0.8058031850086991, + "tests/influence/torch/test_util.py::test_align_structure_error[source0-target0]": 0.006448083964642137, + "tests/influence/torch/test_util.py::test_align_structure_error[source1-target1]": 0.00412756999139674, + "tests/influence/torch/test_util.py::test_align_structure_error[source2-unsupported]": 0.0034424379700794816, + "tests/influence/torch/test_util.py::test_align_structure_success[source0-target0]": 0.005712676967959851, + "tests/influence/torch/test_util.py::test_align_structure_success[source1-target1]": 0.006509427999844775, + "tests/influence/torch/test_util.py::test_align_structure_success[source2-target2]": 0.004126884043216705, + "tests/influence/torch/test_util.py::test_batch_hvp[model_data0-1e-05]": 0.008645498979603872, + "tests/influence/torch/test_util.py::test_batch_hvp[model_data1-1e-05]": 0.019303103996207938, + "tests/influence/torch/test_util.py::test_batch_hvp[model_data2-1e-05]": 0.009336137009086087, + "tests/influence/torch/test_util.py::test_batch_hvp[model_data3-1e-05]": 0.008507663034833968, + "tests/influence/torch/test_util.py::test_batch_hvp[model_data4-1e-05]": 0.009635148977395147, + "tests/influence/torch/test_util.py::test_get_hvp_function[model_data0-4-avg]": 0.02610064498730935, + "tests/influence/torch/test_util.py::test_get_hvp_function[model_data0-4-full]": 
0.02072099200449884, + "tests/influence/torch/test_util.py::test_get_hvp_function[model_data1-5-avg]": 0.038389799010474235, + "tests/influence/torch/test_util.py::test_get_hvp_function[model_data1-5-full]": 0.0304763910244219, + "tests/influence/torch/test_util.py::test_get_hvp_function[model_data2-10-avg]": 0.01742512101191096, + "tests/influence/torch/test_util.py::test_get_hvp_function[model_data2-10-full]": 0.01769776103901677, + "tests/influence/torch/test_util.py::test_get_hvp_function[model_data3-8-avg]": 0.02227302800747566, + "tests/influence/torch/test_util.py::test_get_hvp_function[model_data3-8-full]": 0.018950653000501916, + "tests/influence/torch/test_util.py::test_get_hvp_function[model_data4-4-avg]": 0.20586689305491745, + "tests/influence/torch/test_util.py::test_get_hvp_function[model_data4-4-full]": 0.12533411505864933, + "tests/influence/torch/test_util.py::test_lanzcos_low_rank_hessian_approx[model_data0-4-200-0.0001]": 3.6178578450053465, + "tests/influence/torch/test_util.py::test_lanzcos_low_rank_hessian_approx[model_data1-5-70-0.001]": 3.19126154697733, + "tests/influence/torch/test_util.py::test_lanzcos_low_rank_hessian_approx[model_data2-10-50-0.0001]": 0.3648842849652283, + "tests/influence/torch/test_util.py::test_lanzcos_low_rank_hessian_approx[model_data3-8-160-1e-05]": 5.310517774982145, + "tests/influence/torch/test_util.py::test_lanzcos_low_rank_hessian_approx[model_data4-4-250-1e-05]": 6.977912158996332, + "tests/influence/torch/test_util.py::test_lanzcos_low_rank_hessian_approx_exception": 0.010342207009671256, + "tests/influence/torch/test_util.py::test_torch_dataset_to_dask_array[tailing_dimensions0-30-5]": 0.19223833800060675, + "tests/influence/torch/test_util.py::test_torch_dataset_to_dask_array[tailing_dimensions0-30-6]": 0.1771155720052775, + "tests/influence/torch/test_util.py::test_torch_dataset_to_dask_array[tailing_dimensions0-45-5]": 0.3583740259928163, + 
"tests/influence/torch/test_util.py::test_torch_dataset_to_dask_array[tailing_dimensions0-45-6]": 0.23107186099514365, + "tests/influence/torch/test_util.py::test_torch_dataset_to_dask_array[tailing_dimensions0-50-5]": 6.3838129109935835, + "tests/influence/torch/test_util.py::test_torch_dataset_to_dask_array[tailing_dimensions0-50-6]": 3.9026464489870705, + "tests/influence/torch/test_util.py::test_torch_dataset_to_dask_array[tailing_dimensions1-30-5]": 0.20121028998983093, + "tests/influence/torch/test_util.py::test_torch_dataset_to_dask_array[tailing_dimensions1-30-6]": 0.1806995400111191, + "tests/influence/torch/test_util.py::test_torch_dataset_to_dask_array[tailing_dimensions1-45-5]": 0.24759877700125799, + "tests/influence/torch/test_util.py::test_torch_dataset_to_dask_array[tailing_dimensions1-45-6]": 0.22885328001575544, + "tests/influence/torch/test_util.py::test_torch_dataset_to_dask_array[tailing_dimensions1-50-5]": 0.2722237109846901, + "tests/influence/torch/test_util.py::test_torch_dataset_to_dask_array[tailing_dimensions1-50-6]": 0.252675962052308, + "tests/influence/torch/test_util.py::test_torch_dataset_to_dask_array[tailing_dimensions2-30-5]": 0.19425044997478835, + "tests/influence/torch/test_util.py::test_torch_dataset_to_dask_array[tailing_dimensions2-30-6]": 0.17584561900002882, + "tests/influence/torch/test_util.py::test_torch_dataset_to_dask_array[tailing_dimensions2-45-5]": 0.2497809480119031, + "tests/influence/torch/test_util.py::test_torch_dataset_to_dask_array[tailing_dimensions2-45-6]": 0.24781898900982924, + "tests/influence/torch/test_util.py::test_torch_dataset_to_dask_array[tailing_dimensions2-50-5]": 0.26879918097984046, + "tests/influence/torch/test_util.py::test_torch_dataset_to_dask_array[tailing_dimensions2-50-6]": 0.26797316601732746, + "tests/parallel/test_parallel.py::test_chunkification[joblib-data0-3-expected_chunks0]": 0.003192901989677921, + 
"tests/parallel/test_parallel.py::test_chunkification[joblib-data1-2-expected_chunks1]": 0.003511309012537822, + "tests/parallel/test_parallel.py::test_chunkification[joblib-data2-2-expected_chunks2]": 0.00337875698460266, + "tests/parallel/test_parallel.py::test_chunkification[joblib-data3-3-expected_chunks3]": 0.0032286280184052885, + "tests/parallel/test_parallel.py::test_chunkification[joblib-data4-5-expected_chunks4]": 0.0035956800275016576, + "tests/parallel/test_parallel.py::test_chunkification[joblib-data5-42-expected_chunks5]": 0.003795255965087563, + "tests/parallel/test_parallel.py::test_chunkification[joblib-data6-42-expected_chunks6]": 0.003133207996143028, + "tests/parallel/test_parallel.py::test_chunkification[joblib-data7-4-expected_chunks7]": 0.004749076993903145, + "tests/parallel/test_parallel.py::test_chunkification[joblib-data8-4-expected_chunks8]": 0.0034613849711604416, + "tests/parallel/test_parallel.py::test_chunkification[ray-external-data0-3-expected_chunks0]": 0.0031638060172554106, + "tests/parallel/test_parallel.py::test_chunkification[ray-external-data1-2-expected_chunks1]": 0.004364188993349671, + "tests/parallel/test_parallel.py::test_chunkification[ray-external-data2-2-expected_chunks2]": 0.005876399023691192, + "tests/parallel/test_parallel.py::test_chunkification[ray-external-data3-3-expected_chunks3]": 0.0034405409824103117, + "tests/parallel/test_parallel.py::test_chunkification[ray-external-data4-5-expected_chunks4]": 0.0038114589697215706, + "tests/parallel/test_parallel.py::test_chunkification[ray-external-data5-42-expected_chunks5]": 0.003221377992304042, + "tests/parallel/test_parallel.py::test_chunkification[ray-external-data6-42-expected_chunks6]": 0.0032008349953684956, + "tests/parallel/test_parallel.py::test_chunkification[ray-external-data7-4-expected_chunks7]": 0.003124484996078536, + "tests/parallel/test_parallel.py::test_chunkification[ray-external-data8-4-expected_chunks8]": 0.0033526600163895637, + 
"tests/parallel/test_parallel.py::test_chunkification[ray-local-data0-3-expected_chunks0]": 0.006123727012891322, + "tests/parallel/test_parallel.py::test_chunkification[ray-local-data1-2-expected_chunks1]": 0.0053288199997041374, + "tests/parallel/test_parallel.py::test_chunkification[ray-local-data2-2-expected_chunks2]": 0.005889113061130047, + "tests/parallel/test_parallel.py::test_chunkification[ray-local-data3-3-expected_chunks3]": 0.006710874993586913, + "tests/parallel/test_parallel.py::test_chunkification[ray-local-data4-5-expected_chunks4]": 0.00494873701245524, + "tests/parallel/test_parallel.py::test_chunkification[ray-local-data5-42-expected_chunks5]": 0.005258859979221597, + "tests/parallel/test_parallel.py::test_chunkification[ray-local-data6-42-expected_chunks6]": 0.0059889889671467245, + "tests/parallel/test_parallel.py::test_chunkification[ray-local-data7-4-expected_chunks7]": 0.00598992602317594, + "tests/parallel/test_parallel.py::test_chunkification[ray-local-data8-4-expected_chunks8]": 0.004992969945305958, + "tests/parallel/test_parallel.py::test_effective_n_jobs[joblib]": 0.0040679979720152915, + "tests/parallel/test_parallel.py::test_effective_n_jobs[ray-external]": 3.7438874400395434, + "tests/parallel/test_parallel.py::test_effective_n_jobs[ray-local]": 4.747794731985778, + "tests/parallel/test_parallel.py::test_future_cancellation[joblib]": 0.0032916720083449036, + "tests/parallel/test_parallel.py::test_future_cancellation[ray-external]": 6.0036520949797705, + "tests/parallel/test_parallel.py::test_future_cancellation[ray-local]": 5.18983832403319, + "tests/parallel/test_parallel.py::test_futures_executor_map[joblib]": 1.036263770016376, + "tests/parallel/test_parallel.py::test_futures_executor_map[ray-external]": 0.09937145400908776, + "tests/parallel/test_parallel.py::test_futures_executor_map[ray-local]": 0.1012835220026318, + "tests/parallel/test_parallel.py::test_futures_executor_map_with_max_workers[joblib]": 0.005493839009432122, + 
"tests/parallel/test_parallel.py::test_futures_executor_map_with_max_workers[ray-external]": 1.0856830689881463, + "tests/parallel/test_parallel.py::test_futures_executor_map_with_max_workers[ray-local]": 1.1039586100087035, + "tests/parallel/test_parallel.py::test_futures_executor_submit[joblib]": 1.0226233069843147, + "tests/parallel/test_parallel.py::test_futures_executor_submit[ray-external]": 0.04702838300727308, + "tests/parallel/test_parallel.py::test_futures_executor_submit[ray-local]": 0.05678758802241646, + "tests/parallel/test_parallel.py::test_map_reduce_job[joblib-1-list-indices0-expected0]": 0.00467701000161469, + "tests/parallel/test_parallel.py::test_map_reduce_job[joblib-1-list-indices1-expected1]": 0.0042948100017383695, + "tests/parallel/test_parallel.py::test_map_reduce_job[joblib-1-list-indices2-expected2]": 0.0030402019910980016, + "tests/parallel/test_parallel.py::test_map_reduce_job[joblib-1-numpy-indices4-45]": 0.003510809998260811, + "tests/parallel/test_parallel.py::test_map_reduce_job[joblib-1-range-indices3-expected3]": 0.0033703809895087034, + "tests/parallel/test_parallel.py::test_map_reduce_job[joblib-2-list-indices0-expected0]": 0.0041150090110022575, + "tests/parallel/test_parallel.py::test_map_reduce_job[joblib-2-list-indices1-expected1]": 0.004162732977420092, + "tests/parallel/test_parallel.py::test_map_reduce_job[joblib-2-list-indices2-expected2]": 0.003259133023675531, + "tests/parallel/test_parallel.py::test_map_reduce_job[joblib-2-numpy-indices4-45]": 0.004144402977544814, + "tests/parallel/test_parallel.py::test_map_reduce_job[joblib-2-range-indices3-expected3]": 0.004427063016919419, + "tests/parallel/test_parallel.py::test_map_reduce_job[joblib-4-list-indices0-expected0]": 0.0029944690177217126, + "tests/parallel/test_parallel.py::test_map_reduce_job[joblib-4-list-indices1-expected1]": 0.003702607995364815, + "tests/parallel/test_parallel.py::test_map_reduce_job[joblib-4-list-indices2-expected2]": 0.0037922649644315243, + 
"tests/parallel/test_parallel.py::test_map_reduce_job[joblib-4-numpy-indices4-45]": 0.003974005987402052, + "tests/parallel/test_parallel.py::test_map_reduce_job[joblib-4-range-indices3-expected3]": 0.003567876963643357, + "tests/parallel/test_parallel.py::test_map_reduce_job[ray-external-1-list-indices0-expected0]": 0.005004247970646247, + "tests/parallel/test_parallel.py::test_map_reduce_job[ray-external-1-list-indices1-expected1]": 0.004017235012724996, + "tests/parallel/test_parallel.py::test_map_reduce_job[ray-external-1-list-indices2-expected2]": 0.004506341996602714, + "tests/parallel/test_parallel.py::test_map_reduce_job[ray-external-1-numpy-indices4-45]": 0.003820352052571252, + "tests/parallel/test_parallel.py::test_map_reduce_job[ray-external-1-range-indices3-expected3]": 0.004570545977912843, + "tests/parallel/test_parallel.py::test_map_reduce_job[ray-external-2-list-indices0-expected0]": 0.004423253994900733, + "tests/parallel/test_parallel.py::test_map_reduce_job[ray-external-2-list-indices1-expected1]": 0.006525125994812697, + "tests/parallel/test_parallel.py::test_map_reduce_job[ray-external-2-list-indices2-expected2]": 0.004407291999086738, + "tests/parallel/test_parallel.py::test_map_reduce_job[ray-external-2-numpy-indices4-45]": 0.003792414005147293, + "tests/parallel/test_parallel.py::test_map_reduce_job[ray-external-2-range-indices3-expected3]": 0.0040772780193947256, + "tests/parallel/test_parallel.py::test_map_reduce_job[ray-external-4-list-indices0-expected0]": 0.003901798016158864, + "tests/parallel/test_parallel.py::test_map_reduce_job[ray-external-4-list-indices1-expected1]": 0.003945255011785775, + "tests/parallel/test_parallel.py::test_map_reduce_job[ray-external-4-list-indices2-expected2]": 0.003970507998019457, + "tests/parallel/test_parallel.py::test_map_reduce_job[ray-external-4-numpy-indices4-45]": 0.003773231990635395, + "tests/parallel/test_parallel.py::test_map_reduce_job[ray-external-4-range-indices3-expected3]": 
0.0037270110042300075, + "tests/parallel/test_parallel.py::test_map_reduce_job[ray-local-1-list-indices0-expected0]": 0.014644921000581235, + "tests/parallel/test_parallel.py::test_map_reduce_job[ray-local-1-list-indices1-expected1]": 0.016928659984841943, + "tests/parallel/test_parallel.py::test_map_reduce_job[ray-local-1-list-indices2-expected2]": 0.010305310046533123, + "tests/parallel/test_parallel.py::test_map_reduce_job[ray-local-1-numpy-indices4-45]": 0.008892301004379988, + "tests/parallel/test_parallel.py::test_map_reduce_job[ray-local-1-range-indices3-expected3]": 0.014073127938900143, + "tests/parallel/test_parallel.py::test_map_reduce_job[ray-local-2-list-indices0-expected0]": 0.007267932000104338, + "tests/parallel/test_parallel.py::test_map_reduce_job[ray-local-2-list-indices1-expected1]": 0.007973060040967539, + "tests/parallel/test_parallel.py::test_map_reduce_job[ray-local-2-list-indices2-expected2]": 0.008930301031796262, + "tests/parallel/test_parallel.py::test_map_reduce_job[ray-local-2-numpy-indices4-45]": 0.009722396003780887, + "tests/parallel/test_parallel.py::test_map_reduce_job[ray-local-2-range-indices3-expected3]": 0.00856516498606652, + "tests/parallel/test_parallel.py::test_map_reduce_job[ray-local-4-list-indices0-expected0]": 0.013356091978494078, + "tests/parallel/test_parallel.py::test_map_reduce_job[ray-local-4-list-indices1-expected1]": 0.006517391972010955, + "tests/parallel/test_parallel.py::test_map_reduce_job[ray-local-4-list-indices2-expected2]": 0.010245340003166348, + "tests/parallel/test_parallel.py::test_map_reduce_job[ray-local-4-numpy-indices4-45]": 0.006771289015887305, + "tests/parallel/test_parallel.py::test_map_reduce_job[ray-local-4-range-indices3-expected3]": 0.007704173040110618, + "tests/parallel/test_parallel.py::test_map_reduce_job_partial_map_and_reduce_func[joblib]": 0.004209100996376947, + "tests/parallel/test_parallel.py::test_map_reduce_job_partial_map_and_reduce_func[ray-external]": 0.002531879028538242, 
+ "tests/parallel/test_parallel.py::test_map_reduce_job_partial_map_and_reduce_func[ray-local]": 0.006263459014007822, + "tests/parallel/test_parallel.py::test_map_reduce_seeding[joblib-42-12]": 0.015576867037452757, + "tests/parallel/test_parallel.py::test_map_reduce_seeding[ray-external-42-12]": 0.005515481985639781, + "tests/parallel/test_parallel.py::test_map_reduce_seeding[ray-local-42-12]": 0.008114579977700487, + "tests/parallel/test_parallel.py::test_wrap_function[joblib]": 0.0033907410106621683, + "tests/parallel/test_parallel.py::test_wrap_function[ray-external]": 2.7244514969934244, + "tests/parallel/test_parallel.py::test_wrap_function[ray-local]": 3.2604004809982143, + "tests/test_plugin.py::test_failure": 0.0017938569653779268, + "tests/test_plugin.py::test_fixture_call_no_arguments": 0.0019022209744434804, + "tests/test_plugin.py::test_fixture_only[1]": 0.002128774009179324, + "tests/test_plugin.py::test_fixture_only[2]": 0.002033632976235822, + "tests/test_plugin.py::test_marker_and_fixture[1]": 0.0019118520140182227, + "tests/test_plugin.py::test_marker_and_fixture[2]": 0.001956833031727001, + "tests/test_plugin.py::test_marker_ignore_exception[0]": 0.0017019310325849801, + "tests/test_plugin.py::test_marker_ignore_exception[1]": 0.0017486069991718978, + "tests/test_plugin.py::test_marker_ignore_exception[2]": 0.0016994159668684006, + "tests/test_plugin.py::test_marker_ignore_exception[3]": 0.0017209959914907813, + "tests/test_plugin.py::test_marker_ignore_exception[4]": 0.0017584379820618778, + "tests/test_plugin.py::test_marker_only[0]": 0.004290158016374335, + "tests/test_plugin.py::test_marker_only_with_data_fixture[0]": 0.0020319949835538864, + "tests/test_plugin.py::test_marker_only_with_data_fixture[1]": 0.0022678270179312676, "tests/test_plugin.py::test_marker_only_with_data_fixture[2]": 0.0012167239910922945, - 
"tests/test_results.py::test_adding_different_indices[indices_10-names_10-values_10-indices_20-names_20-values_20-expected_indices0-expected_names0-expected_values0]": 0.0020641259907279164, - "tests/test_results.py::test_adding_different_indices[indices_11-names_11-values_11-indices_21-names_21-values_21-expected_indices1-expected_names1-expected_values1]": 0.002675808995263651, - "tests/test_results.py::test_adding_different_indices[indices_12-names_12-values_12-indices_22-names_22-values_22-expected_indices2-expected_names2-expected_values2]": 0.002674269024282694, - "tests/test_results.py::test_adding_different_indices[indices_13-names_13-values_13-indices_23-names_23-values_23-expected_indices3-expected_names3-expected_values3]": 0.0020707659714389592, - "tests/test_results.py::test_adding_random": 0.0034820580040104687, - "tests/test_results.py::test_dataframe_sorting[values0-names0-ranks_asc0]": 0.0029723149491474032, - "tests/test_results.py::test_dataframe_sorting[values1-names1-ranks_asc1]": 0.002218269946752116, - "tests/test_results.py::test_empty[0]": 0.0012037760170642287, - "tests/test_results.py::test_empty[5]": 0.001365817035548389, - "tests/test_results.py::test_empty_deprecation": 0.0013571020099334419, - "tests/test_results.py::test_equality[values0-names0]": 0.0021291770099196583, - "tests/test_results.py::test_equality[values1-names1]": 0.0016342299932148308, - "tests/test_results.py::test_extra_values[extra_values0]": 0.001437259983504191, - "tests/test_results.py::test_extra_values[extra_values1]": 0.0015066640044096857, - "tests/test_results.py::test_from_random_creation[-1.0-10]": 0.0015409209881909192, - "tests/test_results.py::test_from_random_creation[-1.0-1]": 0.0014630080258939415, - "tests/test_results.py::test_from_random_creation[1.0-10]": 0.0012284110125619918, - "tests/test_results.py::test_from_random_creation[1.0-1]": 0.0013108189741615206, - "tests/test_results.py::test_from_random_creation[None-10]": 0.0012196720344945788, - 
"tests/test_results.py::test_from_random_creation[None-1]": 0.0015253000019583851, - "tests/test_results.py::test_from_random_creation_errors": 0.0009378239628858864, - "tests/test_results.py::test_get_idx": 0.0010275309905409813, - "tests/test_results.py::test_indexing[values0-names0-ranks_asc0]": 0.0014630479854531586, - "tests/test_results.py::test_indexing[values1-names1-ranks_asc1]": 0.001598447997821495, - "tests/test_results.py::test_iter[values0-names0-ranks_asc0]": 0.0013525879476219416, - "tests/test_results.py::test_iter[values1-names1-ranks_asc1]": 0.0014122460270300508, - "tests/test_results.py::test_names[data_names0]": 0.0015603950014337897, - "tests/test_results.py::test_serialization[values0-None-dumps-loads0]": 0.001649087033001706, - "tests/test_results.py::test_serialization[values0-None-dumps-loads1]": 0.0016458219906780869, - "tests/test_results.py::test_serialization[values1-None-dumps-loads0]": 0.0015400749980472028, - "tests/test_results.py::test_serialization[values1-None-dumps-loads1]": 0.0019450989784672856, - "tests/test_results.py::test_sorting[values0-names0-ranks_asc0]": 0.0016402129840571433, - "tests/test_results.py::test_sorting[values1-names1-ranks_asc1]": 0.0016363860049750656, - "tests/test_results.py::test_todataframe[values0-names0-ranks_asc0]": 0.0023001570079941303, - "tests/test_results.py::test_todataframe[values1-names1-ranks_asc1]": 0.002222412033006549, - "tests/test_results.py::test_types[indices0-int32-data_names0---True]": 0.007417112967232242, + "tests/utils/test_caching.py::test_cached_func_hash_function[foo--False]": 0.004049851006129757, + "tests/utils/test_caching.py::test_cached_func_hash_function[foo-foo-True]": 0.002690388966584578, + "tests/utils/test_caching.py::test_cached_func_hash_function[foo-foo_duplicate-True]": 0.002542914997320622, + "tests/utils/test_caching.py::test_cached_func_hash_function[foo-foo_with_random-False]": 0.0028115719615016133, + 
"tests/utils/test_caching.py::test_cached_func_hash_function[foo_with_random-foo_with_random_and_sleep-False]": 0.0031461880425922573, "tests/utils/test_caching.py::test_failed_connection": 0.0039788429858163, + "tests/utils/test_caching.py::test_faster_with_repeated_training[disk]": 5.462864195025759, + "tests/utils/test_caching.py::test_faster_with_repeated_training[in-memory]": 5.410583495016908, + "tests/utils/test_caching.py::test_faster_with_repeated_training[memcached]": 5.505492224008776, + "tests/utils/test_caching.py::test_memcached_failed_connection": 0.0027141640021000057, "tests/utils/test_caching.py::test_memcached_faster_with_repeated_training": 5.003239913989091, "tests/utils/test_caching.py::test_memcached_parallel_jobs[joblib]": 3.1677759810409043, "tests/utils/test_caching.py::test_memcached_parallel_jobs[ray-external]": 38.430890925985295, @@ -151,303 +509,454 @@ "tests/utils/test_caching.py::test_memcached_parallel_repeated_training[ray-local-20-2-20-10]": 0.007027510990155861, "tests/utils/test_caching.py::test_memcached_repeated_training": 2.3077823049970903, "tests/utils/test_caching.py::test_memcached_single_job": 0.007132280006771907, - "tests/utils/test_dataset.py::test_creating_dataset_from_sklearn[0.1]": 0.009810923977056518, - "tests/utils/test_dataset.py::test_creating_dataset_from_sklearn[0.5]": 0.0023630280047655106, - "tests/utils/test_dataset.py::test_creating_dataset_from_sklearn[0.8]": 0.002483188029145822, - "tests/utils/test_dataset.py::test_creating_dataset_from_x_y_arrays[0.1-kwargs0]": 0.0022864479979034513, - "tests/utils/test_dataset.py::test_creating_dataset_from_x_y_arrays[0.1-kwargs1]": 0.001960736990440637, - "tests/utils/test_dataset.py::test_creating_dataset_from_x_y_arrays[0.5-kwargs0]": 0.0018571619875729084, - "tests/utils/test_dataset.py::test_creating_dataset_from_x_y_arrays[0.5-kwargs1]": 0.0019256969972047955, - "tests/utils/test_dataset.py::test_creating_dataset_from_x_y_arrays[0.8-kwargs0]": 
0.0020103229908272624, - "tests/utils/test_dataset.py::test_creating_dataset_from_x_y_arrays[0.8-kwargs1]": 0.001870437990874052, - "tests/utils/test_dataset.py::test_creating_dataset_subsclassfrom_sklearn[0.1]": 0.004145220998907462, - "tests/utils/test_dataset.py::test_creating_dataset_subsclassfrom_sklearn[0.5]": 0.002273507008794695, - "tests/utils/test_dataset.py::test_creating_dataset_subsclassfrom_sklearn[0.8]": 0.0025340290158055723, - "tests/utils/test_dataset.py::test_creating_grouped_dataset_from_sklearn[0.1]": 0.002445343037834391, - "tests/utils/test_dataset.py::test_creating_grouped_dataset_from_sklearn[0.5]": 0.002387374988757074, - "tests/utils/test_dataset.py::test_creating_grouped_dataset_from_sklearn[0.8]": 0.0025074610312003642, - "tests/utils/test_dataset.py::test_creating_grouped_dataset_from_sklearn_failure[0.1]": 0.0031885300122667104, - "tests/utils/test_dataset.py::test_creating_grouped_dataset_from_sklearn_failure[0.5]": 0.0018069100042339414, - "tests/utils/test_dataset.py::test_creating_grouped_dataset_from_sklearn_failure[0.8]": 0.0019649149908218533, - "tests/utils/test_dataset.py::test_creating_grouped_dataset_from_x_y_arrays[0.1-kwargs0]": 0.002473844971973449, - "tests/utils/test_dataset.py::test_creating_grouped_dataset_from_x_y_arrays[0.1-kwargs1]": 0.0024133779807016253, - "tests/utils/test_dataset.py::test_creating_grouped_dataset_from_x_y_arrays[0.5-kwargs0]": 0.0023138070246204734, - "tests/utils/test_dataset.py::test_creating_grouped_dataset_from_x_y_arrays[0.5-kwargs1]": 0.002177672984544188, - "tests/utils/test_dataset.py::test_creating_grouped_dataset_from_x_y_arrays[0.8-kwargs0]": 0.0030658979958388954, - "tests/utils/test_dataset.py::test_creating_grouped_dataset_from_x_y_arrays[0.8-kwargs1]": 0.002469450992066413, - "tests/utils/test_dataset.py::test_creating_grouped_dataset_from_x_y_arrays_failure[0.1]": 0.0016314840177074075, - "tests/utils/test_dataset.py::test_creating_grouped_dataset_from_x_y_arrays_failure[0.5]": 
0.0017394520109519362, - "tests/utils/test_dataset.py::test_creating_grouped_dataset_from_x_y_arrays_failure[0.8]": 0.0017109749896917492, - "tests/utils/test_dataset.py::test_creating_grouped_dataset_subsclassfrom_sklearn[0.1]": 0.003284825972514227, - "tests/utils/test_dataset.py::test_creating_grouped_dataset_subsclassfrom_sklearn[0.5]": 0.0038210980128496885, - "tests/utils/test_dataset.py::test_creating_grouped_dataset_subsclassfrom_sklearn[0.8]": 0.0023955479555297643, - "tests/utils/test_dataset.py::test_grouped_dataset_results": 0.00312941602896899, - "tests/utils/test_numeric.py::test_powerset": 0.002356015960685909, - "tests/utils/test_numeric.py::test_random_matrix_with_condition_number[0-2-ValueError]": 0.0011365640093572438, - "tests/utils/test_numeric.py::test_random_matrix_with_condition_number[1-2-ValueError]": 0.0010459299955982715, - "tests/utils/test_numeric.py::test_random_matrix_with_condition_number[10-1-ValueError]": 0.0011281229672022164, - "tests/utils/test_numeric.py::test_random_matrix_with_condition_number[10-2-None]": 0.001765107037499547, - "tests/utils/test_numeric.py::test_random_matrix_with_condition_number[2-10-None]": 0.001528021995909512, - "tests/utils/test_numeric.py::test_random_matrix_with_condition_number[4--2-ValueError]": 0.0011659429874271154, - "tests/utils/test_numeric.py::test_random_matrix_with_condition_number[7-23-None]": 0.001419320033164695, - "tests/utils/test_numeric.py::test_random_matrix_with_condition_number_reproducible[10-2]": 0.001462101994547993, - "tests/utils/test_numeric.py::test_random_matrix_with_condition_number_reproducible[2-10]": 0.001395261992001906, - "tests/utils/test_numeric.py::test_random_matrix_with_condition_number_reproducible[7-23]": 0.001416039012838155, - "tests/utils/test_numeric.py::test_random_matrix_with_condition_number_stochastic[10-2]": 0.0015627649845555425, - "tests/utils/test_numeric.py::test_random_matrix_with_condition_number_stochastic[2-10]": 0.0014263579796534032, - 
"tests/utils/test_numeric.py::test_random_matrix_with_condition_number_stochastic[7-23]": 0.0016800050216261297, - "tests/utils/test_numeric.py::test_random_powerset[0-1]": 0.0012409990013111383, - "tests/utils/test_numeric.py::test_random_powerset[1-10]": 0.0014637470012530684, - "tests/utils/test_numeric.py::test_random_powerset[10-1024]": 0.0079122620227281, - "tests/utils/test_numeric.py::test_random_powerset[5-128]": 0.0020825770043302327, - "tests/utils/test_numeric.py::test_random_powerset_label_min[0-10-3-1000]": 0.11152737599331886, - "tests/utils/test_numeric.py::test_random_powerset_label_min[1-10-3-1000]": 0.11375491399667226, - "tests/utils/test_numeric.py::test_random_powerset_label_min[2-10-3-1000]": 0.11396494103246368, - "tests/utils/test_numeric.py::test_random_powerset_reproducible[10-1024]": 0.013066521001746878, - "tests/utils/test_numeric.py::test_random_powerset_stochastic[10-1024]": 0.012338358006672934, - "tests/utils/test_numeric.py::test_random_subset_of_size[0-0-None]": 0.0015464180323760957, - "tests/utils/test_numeric.py::test_random_subset_of_size[0-1-ValueError]": 0.001127758005168289, - "tests/utils/test_numeric.py::test_random_subset_of_size[10-0-None]": 0.0013323969906195998, - "tests/utils/test_numeric.py::test_random_subset_of_size[10-3-None]": 0.0015970039821695536, - "tests/utils/test_numeric.py::test_random_subset_of_size[1000-40-None]": 0.001427212991984561, - "tests/utils/test_numeric.py::test_random_subset_of_size_stochastic[10-3]": 0.001142591005191207, - "tests/utils/test_numeric.py::test_random_subset_of_size_stochastic[1000-40]": 0.0012538870214484632, - "tests/utils/test_numeric.py::test_running_moments": 0.35335890398710035, - "tests/utils/test_parallel.py::test_chunkification[joblib-data0-3-expected_chunks0]": 0.0042906299931928515, - "tests/utils/test_parallel.py::test_chunkification[joblib-data1-2-expected_chunks1]": 0.004308464995119721, - 
"tests/utils/test_parallel.py::test_chunkification[joblib-data2-2-expected_chunks2]": 0.004244079987984151, - "tests/utils/test_parallel.py::test_chunkification[joblib-data3-3-expected_chunks3]": 0.004028873983770609, - "tests/utils/test_parallel.py::test_chunkification[joblib-data4-5-expected_chunks4]": 0.004101024009287357, - "tests/utils/test_parallel.py::test_chunkification[joblib-data5-42-expected_chunks5]": 0.004789252998307347, - "tests/utils/test_parallel.py::test_chunkification[joblib-data6-42-expected_chunks6]": 0.004256373038515449, - "tests/utils/test_parallel.py::test_chunkification[joblib-data7-4-expected_chunks7]": 0.004143773025134578, - "tests/utils/test_parallel.py::test_chunkification[joblib-data8-4-expected_chunks8]": 0.0040604640380479395, - "tests/utils/test_parallel.py::test_chunkification[ray-external-data0-3-expected_chunks0]": 0.0060307729872874916, - "tests/utils/test_parallel.py::test_chunkification[ray-external-data1-2-expected_chunks1]": 0.005929058010224253, - "tests/utils/test_parallel.py::test_chunkification[ray-external-data2-2-expected_chunks2]": 0.009121662005782127, - "tests/utils/test_parallel.py::test_chunkification[ray-external-data3-3-expected_chunks3]": 0.009956339985365048, - "tests/utils/test_parallel.py::test_chunkification[ray-external-data4-5-expected_chunks4]": 0.010149178997380659, - "tests/utils/test_parallel.py::test_chunkification[ray-external-data5-42-expected_chunks5]": 0.010347278992412612, - "tests/utils/test_parallel.py::test_chunkification[ray-external-data6-42-expected_chunks6]": 0.010047424992080778, - "tests/utils/test_parallel.py::test_chunkification[ray-external-data7-4-expected_chunks7]": 0.008645244990475476, - "tests/utils/test_parallel.py::test_chunkification[ray-external-data8-4-expected_chunks8]": 0.009245932975318283, - "tests/utils/test_parallel.py::test_chunkification[ray-local-data0-3-expected_chunks0]": 0.0045589170476887375, - 
"tests/utils/test_parallel.py::test_chunkification[ray-local-data1-2-expected_chunks1]": 0.004910157964332029, - "tests/utils/test_parallel.py::test_chunkification[ray-local-data2-2-expected_chunks2]": 0.004910080024274066, - "tests/utils/test_parallel.py::test_chunkification[ray-local-data3-3-expected_chunks3]": 0.0059317940031178296, - "tests/utils/test_parallel.py::test_chunkification[ray-local-data4-5-expected_chunks4]": 0.008992511982796714, - "tests/utils/test_parallel.py::test_chunkification[ray-local-data5-42-expected_chunks5]": 0.008223566022934392, - "tests/utils/test_parallel.py::test_chunkification[ray-local-data6-42-expected_chunks6]": 0.007052068045595661, - "tests/utils/test_parallel.py::test_chunkification[ray-local-data7-4-expected_chunks7]": 0.004718763986602426, - "tests/utils/test_parallel.py::test_chunkification[ray-local-data8-4-expected_chunks8]": 0.005322564014932141, - "tests/utils/test_parallel.py::test_effective_n_jobs[joblib]": 0.0014253620174713433, - "tests/utils/test_parallel.py::test_effective_n_jobs[ray-external]": 3.978927739954088, - "tests/utils/test_parallel.py::test_effective_n_jobs[ray-local]": 4.104055134986993, - "tests/utils/test_parallel.py::test_future_cancellation[joblib]": 0.005014022986870259, - "tests/utils/test_parallel.py::test_future_cancellation[ray-external]": 1.9293224809807725, - "tests/utils/test_parallel.py::test_future_cancellation[ray-local]": 0.07703918303013779, - "tests/utils/test_parallel.py::test_futures_executor_map[joblib]": 1.5601177359640133, - "tests/utils/test_parallel.py::test_futures_executor_map[ray-external]": 0.09417001300607808, - "tests/utils/test_parallel.py::test_futures_executor_map[ray-local]": 0.09271710200118832, - "tests/utils/test_parallel.py::test_futures_executor_map_with_max_workers[joblib]": 0.007176648010499775, - "tests/utils/test_parallel.py::test_futures_executor_map_with_max_workers[ray-external]": 1.090440120024141, - 
"tests/utils/test_parallel.py::test_futures_executor_map_with_max_workers[ray-local]": 1.095393077004701, - "tests/utils/test_parallel.py::test_futures_executor_submit[joblib]": 1.8566069509834051, - "tests/utils/test_parallel.py::test_futures_executor_submit[ray-external]": 0.04992300402955152, - "tests/utils/test_parallel.py::test_futures_executor_submit[ray-local]": 0.048481280013220385, - "tests/utils/test_parallel.py::test_map_reduce_job[joblib-1-list-indices0-expected0]": 0.0015987549850251526, - "tests/utils/test_parallel.py::test_map_reduce_job[joblib-1-list-indices1-expected1]": 0.001547530002426356, - "tests/utils/test_parallel.py::test_map_reduce_job[joblib-1-list-indices2-expected2]": 0.001560483971843496, - "tests/utils/test_parallel.py::test_map_reduce_job[joblib-1-numpy-indices4-45]": 0.00178057502489537, - "tests/utils/test_parallel.py::test_map_reduce_job[joblib-1-range-indices3-expected3]": 0.0015469170466531068, - "tests/utils/test_parallel.py::test_map_reduce_job[joblib-2-list-indices0-expected0]": 0.0018091480305884033, - "tests/utils/test_parallel.py::test_map_reduce_job[joblib-2-list-indices1-expected1]": 0.01276223495369777, - "tests/utils/test_parallel.py::test_map_reduce_job[joblib-2-list-indices2-expected2]": 0.012882986018666998, - "tests/utils/test_parallel.py::test_map_reduce_job[joblib-2-numpy-indices4-45]": 0.01399321696953848, - "tests/utils/test_parallel.py::test_map_reduce_job[joblib-2-range-indices3-expected3]": 0.012885421980172396, - "tests/utils/test_parallel.py::test_map_reduce_job[joblib-4-list-indices0-expected0]": 0.15361307095736265, - "tests/utils/test_parallel.py::test_map_reduce_job[joblib-4-list-indices1-expected1]": 0.8156346119940281, - "tests/utils/test_parallel.py::test_map_reduce_job[joblib-4-list-indices2-expected2]": 1.3068530370073859, - "tests/utils/test_parallel.py::test_map_reduce_job[joblib-4-numpy-indices4-45]": 0.01750938399345614, - 
"tests/utils/test_parallel.py::test_map_reduce_job[joblib-4-range-indices3-expected3]": 0.017205809010192752, - "tests/utils/test_parallel.py::test_map_reduce_job[ray-external-1-list-indices0-expected0]": 0.0029827099933754653, - "tests/utils/test_parallel.py::test_map_reduce_job[ray-external-1-list-indices1-expected1]": 0.0027304230316076428, - "tests/utils/test_parallel.py::test_map_reduce_job[ray-external-1-list-indices2-expected2]": 0.0026203590095974505, - "tests/utils/test_parallel.py::test_map_reduce_job[ray-external-1-numpy-indices4-45]": 0.003456770005868748, - "tests/utils/test_parallel.py::test_map_reduce_job[ray-external-1-range-indices3-expected3]": 0.0027074709651060402, - "tests/utils/test_parallel.py::test_map_reduce_job[ray-external-2-list-indices0-expected0]": 0.8282912400027271, - "tests/utils/test_parallel.py::test_map_reduce_job[ray-external-2-list-indices1-expected1]": 2.2837093910493422, - "tests/utils/test_parallel.py::test_map_reduce_job[ray-external-2-list-indices2-expected2]": 2.4645657170040067, - "tests/utils/test_parallel.py::test_map_reduce_job[ray-external-2-numpy-indices4-45]": 2.281004316988401, - "tests/utils/test_parallel.py::test_map_reduce_job[ray-external-2-range-indices3-expected3]": 2.393285626982106, - "tests/utils/test_parallel.py::test_map_reduce_job[ray-external-4-list-indices0-expected0]": 1.903353853005683, - "tests/utils/test_parallel.py::test_map_reduce_job[ray-external-4-list-indices1-expected1]": 2.947957994969329, - "tests/utils/test_parallel.py::test_map_reduce_job[ray-external-4-list-indices2-expected2]": 3.211508878011955, - "tests/utils/test_parallel.py::test_map_reduce_job[ray-external-4-numpy-indices4-45]": 3.3349247129808646, - "tests/utils/test_parallel.py::test_map_reduce_job[ray-external-4-range-indices3-expected3]": 3.599037625041092, - "tests/utils/test_parallel.py::test_map_reduce_job[ray-local-1-list-indices0-expected0]": 0.016201907012145966, - 
"tests/utils/test_parallel.py::test_map_reduce_job[ray-local-1-list-indices1-expected1]": 0.013995222019730136, - "tests/utils/test_parallel.py::test_map_reduce_job[ray-local-1-list-indices2-expected2]": 0.013650566979777068, - "tests/utils/test_parallel.py::test_map_reduce_job[ray-local-1-numpy-indices4-45]": 0.013722714997129515, - "tests/utils/test_parallel.py::test_map_reduce_job[ray-local-1-range-indices3-expected3]": 0.013983122975332662, - "tests/utils/test_parallel.py::test_map_reduce_job[ray-local-2-list-indices0-expected0]": 1.5035187809844501, - "tests/utils/test_parallel.py::test_map_reduce_job[ray-local-2-list-indices1-expected1]": 2.235937710967846, - "tests/utils/test_parallel.py::test_map_reduce_job[ray-local-2-list-indices2-expected2]": 2.1283504489983898, - "tests/utils/test_parallel.py::test_map_reduce_job[ray-local-2-numpy-indices4-45]": 2.0944344620220363, - "tests/utils/test_parallel.py::test_map_reduce_job[ray-local-2-range-indices3-expected3]": 2.104675643990049, - "tests/utils/test_parallel.py::test_map_reduce_job[ray-local-4-list-indices0-expected0]": 1.7145587989652995, - "tests/utils/test_parallel.py::test_map_reduce_job[ray-local-4-list-indices1-expected1]": 2.772829012013972, - "tests/utils/test_parallel.py::test_map_reduce_job[ray-local-4-list-indices2-expected2]": 3.1254515810287558, - "tests/utils/test_parallel.py::test_map_reduce_job[ray-local-4-numpy-indices4-45]": 3.4023931239789817, - "tests/utils/test_parallel.py::test_map_reduce_job[ray-local-4-range-indices3-expected3]": 3.7103631219943054, - "tests/utils/test_parallel.py::test_map_reduce_job_partial_map_and_reduce_func[joblib]": 0.01629631401738152, - "tests/utils/test_parallel.py::test_map_reduce_job_partial_map_and_reduce_func[ray-external]": 3.550109267991502, - "tests/utils/test_parallel.py::test_map_reduce_job_partial_map_and_reduce_func[ray-local]": 3.186494815017795, - "tests/utils/test_parallel.py::test_map_reduce_seeding[joblib-42-12]": 0.05403909899177961, - 
"tests/utils/test_parallel.py::test_map_reduce_seeding[ray-external-42-12]": 9.918427228025394, - "tests/utils/test_parallel.py::test_map_reduce_seeding[ray-local-42-12]": 9.834357938991161, - "tests/utils/test_parallel.py::test_wrap_function[joblib]": 0.0031614619656465948, - "tests/utils/test_parallel.py::test_wrap_function[ray-external]": 3.1981390729779378, - "tests/utils/test_parallel.py::test_wrap_function[ray-local]": 3.2998613989911973, - "tests/utils/test_score.py::test_compose_score": 0.0027295449981465936, - "tests/utils/test_score.py::test_scorer": 0.0051104900194332, - "tests/utils/test_score.py::test_squashed_r2": 0.001943372975802049, - "tests/utils/test_score.py::test_squashed_variance": 0.001487176021328196, - "tests/utils/test_status.py::test_and_status": 0.001112824014853686, - "tests/utils/test_status.py::test_not_status": 0.0010235870140604675, - "tests/utils/test_status.py::test_or_status": 0.0009352969937026501, + "tests/utils/test_caching.py::test_parallel_jobs[disk]": 0.0027724569954443723, + "tests/utils/test_caching.py::test_parallel_jobs[in-memory]": 0.0026654380199033767, + "tests/utils/test_caching.py::test_parallel_jobs[joblib-disk]": 0.008082594998995773, + "tests/utils/test_caching.py::test_parallel_jobs[joblib-in-memory]": 0.007858986000428558, + "tests/utils/test_caching.py::test_parallel_jobs[joblib-memcached]": 5.864486223999847, + "tests/utils/test_caching.py::test_parallel_jobs[memcached]": 0.023942797997733578, + "tests/utils/test_caching.py::test_parallel_jobs[ray-external-disk]": 0.019337756999448175, + "tests/utils/test_caching.py::test_parallel_jobs[ray-external-in-memory]": 3.8737009590004163, + "tests/utils/test_caching.py::test_parallel_jobs[ray-external-memcached]": 0.010424148002130096, + "tests/utils/test_caching.py::test_parallel_jobs[ray-local-disk]": 0.006320855998637853, + "tests/utils/test_caching.py::test_parallel_jobs[ray-local-in-memory]": 0.007159704999139649, + 
"tests/utils/test_caching.py::test_parallel_jobs[ray-local-memcached]": 0.010268650999933016, + "tests/utils/test_caching.py::test_parallel_repeated_training[disk-20-1-10-5]": 0.0175380950095132, + "tests/utils/test_caching.py::test_parallel_repeated_training[disk-20-1-20-10]": 0.013737032044446096, + "tests/utils/test_caching.py::test_parallel_repeated_training[disk-20-2-10-5]": 0.023939374019391835, + "tests/utils/test_caching.py::test_parallel_repeated_training[disk-20-2-20-10]": 0.02181547399959527, + "tests/utils/test_caching.py::test_parallel_repeated_training[in-memory-20-1-10-5]": 0.015921188984066248, + "tests/utils/test_caching.py::test_parallel_repeated_training[in-memory-20-1-20-10]": 0.011334646958857775, + "tests/utils/test_caching.py::test_parallel_repeated_training[in-memory-20-2-10-5]": 0.017839537031250075, + "tests/utils/test_caching.py::test_parallel_repeated_training[in-memory-20-2-20-10]": 0.01734064202173613, + "tests/utils/test_caching.py::test_parallel_repeated_training[joblib-disk-20-1-10-5]": 0.040544517996750074, + "tests/utils/test_caching.py::test_parallel_repeated_training[joblib-disk-20-1-20-10]": 0.041609834999690065, + "tests/utils/test_caching.py::test_parallel_repeated_training[joblib-disk-20-2-10-5]": 0.450297680001313, + "tests/utils/test_caching.py::test_parallel_repeated_training[joblib-disk-20-2-20-10]": 0.41885778900177684, + "tests/utils/test_caching.py::test_parallel_repeated_training[joblib-in-memory-20-1-10-5]": 0.04637932000150613, + "tests/utils/test_caching.py::test_parallel_repeated_training[joblib-in-memory-20-1-20-10]": 0.038561840998227126, + "tests/utils/test_caching.py::test_parallel_repeated_training[joblib-in-memory-20-2-10-5]": 4.16153838199898, + "tests/utils/test_caching.py::test_parallel_repeated_training[joblib-in-memory-20-2-20-10]": 0.47474137900280766, + "tests/utils/test_caching.py::test_parallel_repeated_training[joblib-memcached-20-1-10-5]": 0.03560425399882661, + 
"tests/utils/test_caching.py::test_parallel_repeated_training[joblib-memcached-20-1-20-10]": 0.04425754300064, + "tests/utils/test_caching.py::test_parallel_repeated_training[joblib-memcached-20-2-10-5]": 0.46746473100029107, + "tests/utils/test_caching.py::test_parallel_repeated_training[joblib-memcached-20-2-20-10]": 0.47426626100059366, + "tests/utils/test_caching.py::test_parallel_repeated_training[memcached-20-1-10-5]": 0.016388149961130694, + "tests/utils/test_caching.py::test_parallel_repeated_training[memcached-20-1-20-10]": 0.01673590997233987, + "tests/utils/test_caching.py::test_parallel_repeated_training[memcached-20-2-10-5]": 0.027201458025956526, + "tests/utils/test_caching.py::test_parallel_repeated_training[memcached-20-2-20-10]": 0.027168086962774396, + "tests/utils/test_caching.py::test_parallel_repeated_training[ray-external-disk-20-1-10-5]": 0.019769640000959043, + "tests/utils/test_caching.py::test_parallel_repeated_training[ray-external-disk-20-1-20-10]": 0.02465987799951108, + "tests/utils/test_caching.py::test_parallel_repeated_training[ray-external-disk-20-2-10-5]": 0.012952293998750974, + "tests/utils/test_caching.py::test_parallel_repeated_training[ray-external-disk-20-2-20-10]": 0.010107056999913766, + "tests/utils/test_caching.py::test_parallel_repeated_training[ray-external-in-memory-20-1-10-5]": 0.013676337999640964, + "tests/utils/test_caching.py::test_parallel_repeated_training[ray-external-in-memory-20-1-20-10]": 0.009283014000175172, + "tests/utils/test_caching.py::test_parallel_repeated_training[ray-external-in-memory-20-2-10-5]": 0.014747097000508802, + "tests/utils/test_caching.py::test_parallel_repeated_training[ray-external-in-memory-20-2-20-10]": 0.012189770999611937, + "tests/utils/test_caching.py::test_parallel_repeated_training[ray-external-memcached-20-1-10-5]": 0.014756809001482907, + "tests/utils/test_caching.py::test_parallel_repeated_training[ray-external-memcached-20-1-20-10]": 0.014543373998094467, + 
"tests/utils/test_caching.py::test_parallel_repeated_training[ray-external-memcached-20-2-10-5]": 0.018690378999963286, + "tests/utils/test_caching.py::test_parallel_repeated_training[ray-external-memcached-20-2-20-10]": 0.017414769001334207, + "tests/utils/test_caching.py::test_parallel_repeated_training[ray-local-disk-20-1-10-5]": 0.00978782600031991, + "tests/utils/test_caching.py::test_parallel_repeated_training[ray-local-disk-20-1-20-10]": 0.008025870998608298, + "tests/utils/test_caching.py::test_parallel_repeated_training[ray-local-disk-20-2-10-5]": 0.00932121699952404, + "tests/utils/test_caching.py::test_parallel_repeated_training[ray-local-disk-20-2-20-10]": 0.012999636999666109, + "tests/utils/test_caching.py::test_parallel_repeated_training[ray-local-in-memory-20-1-10-5]": 0.010384335999333416, + "tests/utils/test_caching.py::test_parallel_repeated_training[ray-local-in-memory-20-1-20-10]": 0.007256282997332164, + "tests/utils/test_caching.py::test_parallel_repeated_training[ray-local-in-memory-20-2-10-5]": 0.007955910998134641, + "tests/utils/test_caching.py::test_parallel_repeated_training[ray-local-in-memory-20-2-20-10]": 0.006997692000368261, + "tests/utils/test_caching.py::test_parallel_repeated_training[ray-local-memcached-20-1-10-5]": 0.008193191000827937, + "tests/utils/test_caching.py::test_parallel_repeated_training[ray-local-memcached-20-1-20-10]": 0.010128158999577863, + "tests/utils/test_caching.py::test_parallel_repeated_training[ray-local-memcached-20-2-10-5]": 0.013161438002498471, + "tests/utils/test_caching.py::test_parallel_repeated_training[ray-local-memcached-20-2-20-10]": 0.009240641998985666, + "tests/utils/test_caching.py::test_repeated_training[disk]": 0.33966069895541295, + "tests/utils/test_caching.py::test_repeated_training[in-memory]": 0.23885704105487093, + "tests/utils/test_caching.py::test_repeated_training[memcached]": 0.36306702499859966, + "tests/utils/test_caching.py::test_single_job[disk]": 0.0037449339870363474, + 
"tests/utils/test_caching.py::test_single_job[in-memory]": 0.0029351940320339054, + "tests/utils/test_caching.py::test_single_job[memcached]": 0.005145985021954402, + "tests/utils/test_caching.py::test_without_pymemcache": 0.005478426028275862, + "tests/utils/test_dataset.py::test_creating_dataset_from_sklearn[0.1]": 0.008665675995871425, + "tests/utils/test_dataset.py::test_creating_dataset_from_sklearn[0.5]": 0.00487168098334223, + "tests/utils/test_dataset.py::test_creating_dataset_from_sklearn[0.8]": 0.005765877984231338, + "tests/utils/test_dataset.py::test_creating_dataset_from_x_y_arrays[0.1-kwargs0]": 0.004334063007263467, + "tests/utils/test_dataset.py::test_creating_dataset_from_x_y_arrays[0.1-kwargs1]": 0.003661135968286544, + "tests/utils/test_dataset.py::test_creating_dataset_from_x_y_arrays[0.5-kwargs0]": 0.0034260130196344107, + "tests/utils/test_dataset.py::test_creating_dataset_from_x_y_arrays[0.5-kwargs1]": 0.0037207130226306617, + "tests/utils/test_dataset.py::test_creating_dataset_from_x_y_arrays[0.8-kwargs0]": 0.0036306940019130707, + "tests/utils/test_dataset.py::test_creating_dataset_from_x_y_arrays[0.8-kwargs1]": 0.0032633960363455117, + "tests/utils/test_dataset.py::test_creating_dataset_subsclassfrom_sklearn[0.1]": 0.003974422957981005, + "tests/utils/test_dataset.py::test_creating_dataset_subsclassfrom_sklearn[0.5]": 0.004185625031823292, + "tests/utils/test_dataset.py::test_creating_dataset_subsclassfrom_sklearn[0.8]": 0.003982000023825094, + "tests/utils/test_dataset.py::test_creating_grouped_dataset_from_sklearn[0.1]": 0.005084333999548107, + "tests/utils/test_dataset.py::test_creating_grouped_dataset_from_sklearn[0.5]": 0.0051782039809040725, + "tests/utils/test_dataset.py::test_creating_grouped_dataset_from_sklearn[0.8]": 0.00535002100514248, + "tests/utils/test_dataset.py::test_creating_grouped_dataset_from_sklearn_failure[0.1]": 0.003540790028637275, + 
"tests/utils/test_dataset.py::test_creating_grouped_dataset_from_sklearn_failure[0.5]": 0.004949523019604385, + "tests/utils/test_dataset.py::test_creating_grouped_dataset_from_sklearn_failure[0.8]": 0.0035746630164794624, + "tests/utils/test_dataset.py::test_creating_grouped_dataset_from_x_y_arrays[0.1-kwargs0]": 0.004036521015223116, + "tests/utils/test_dataset.py::test_creating_grouped_dataset_from_x_y_arrays[0.1-kwargs1]": 0.0036350690061226487, + "tests/utils/test_dataset.py::test_creating_grouped_dataset_from_x_y_arrays[0.5-kwargs0]": 0.004778611968504265, + "tests/utils/test_dataset.py::test_creating_grouped_dataset_from_x_y_arrays[0.5-kwargs1]": 0.004359662008937448, + "tests/utils/test_dataset.py::test_creating_grouped_dataset_from_x_y_arrays[0.8-kwargs0]": 0.004105383006390184, + "tests/utils/test_dataset.py::test_creating_grouped_dataset_from_x_y_arrays[0.8-kwargs1]": 0.003795194992562756, + "tests/utils/test_dataset.py::test_creating_grouped_dataset_from_x_y_arrays_failure[0.1]": 0.002760191011475399, + "tests/utils/test_dataset.py::test_creating_grouped_dataset_from_x_y_arrays_failure[0.5]": 0.0043617659830488265, + "tests/utils/test_dataset.py::test_creating_grouped_dataset_from_x_y_arrays_failure[0.8]": 0.003878229996189475, + "tests/utils/test_dataset.py::test_creating_grouped_dataset_subsclassfrom_sklearn[0.1]": 0.005001434998121113, + "tests/utils/test_dataset.py::test_creating_grouped_dataset_subsclassfrom_sklearn[0.5]": 0.004866438015596941, + "tests/utils/test_dataset.py::test_creating_grouped_dataset_subsclassfrom_sklearn[0.8]": 0.005906352016609162, + "tests/utils/test_dataset.py::test_grouped_dataset_results": 0.013181563990656286, + "tests/utils/test_numeric.py::test_powerset": 0.004835293017094955, + "tests/utils/test_numeric.py::test_random_matrix_with_condition_number[0-2-ValueError]": 0.0027691390132531524, + "tests/utils/test_numeric.py::test_random_matrix_with_condition_number[1-2-ValueError]": 0.0026925769925583154, + 
"tests/utils/test_numeric.py::test_random_matrix_with_condition_number[10-1-ValueError]": 0.002887230977648869, + "tests/utils/test_numeric.py::test_random_matrix_with_condition_number[10-2-None]": 0.002661020989762619, + "tests/utils/test_numeric.py::test_random_matrix_with_condition_number[2-10-None]": 0.006404095009202138, + "tests/utils/test_numeric.py::test_random_matrix_with_condition_number[4--2-ValueError]": 0.002480320050381124, + "tests/utils/test_numeric.py::test_random_matrix_with_condition_number[7-23-None]": 0.002957596007036045, + "tests/utils/test_numeric.py::test_random_matrix_with_condition_number_reproducible[10-2]": 0.0030029160261619836, + "tests/utils/test_numeric.py::test_random_matrix_with_condition_number_reproducible[2-10]": 0.0029755540017504245, + "tests/utils/test_numeric.py::test_random_matrix_with_condition_number_reproducible[7-23]": 0.0025017989974003285, + "tests/utils/test_numeric.py::test_random_matrix_with_condition_number_stochastic[10-2]": 0.002511145023163408, + "tests/utils/test_numeric.py::test_random_matrix_with_condition_number_stochastic[2-10]": 0.0028129699639976025, + "tests/utils/test_numeric.py::test_random_matrix_with_condition_number_stochastic[7-23]": 0.0027172630361746997, + "tests/utils/test_numeric.py::test_random_powerset[0-1]": 0.003557410993380472, + "tests/utils/test_numeric.py::test_random_powerset[1-10]": 0.00345834597828798, + "tests/utils/test_numeric.py::test_random_powerset[10-1024]": 0.012248383020050824, + "tests/utils/test_numeric.py::test_random_powerset[5-128]": 0.004129782988457009, + "tests/utils/test_numeric.py::test_random_powerset_label_min[0-10-3-1000]": 0.18313832598505542, + "tests/utils/test_numeric.py::test_random_powerset_label_min[1-10-3-1000]": 0.18961101496824995, + "tests/utils/test_numeric.py::test_random_powerset_label_min[2-10-3-1000]": 0.18344586697639897, + "tests/utils/test_numeric.py::test_random_powerset_reproducible[10-1024]": 0.021032890013884753, + 
"tests/utils/test_numeric.py::test_random_powerset_stochastic[10-1024]": 0.020289141015382484, + "tests/utils/test_numeric.py::test_random_subset_of_size[0-0-None]": 0.004669596964959055, + "tests/utils/test_numeric.py::test_random_subset_of_size[0-1-ValueError]": 0.002807142009260133, + "tests/utils/test_numeric.py::test_random_subset_of_size[10-0-None]": 0.0025325829919893295, + "tests/utils/test_numeric.py::test_random_subset_of_size[10-3-None]": 0.0026709649828262627, + "tests/utils/test_numeric.py::test_random_subset_of_size[1000-40-None]": 0.0033286060206592083, + "tests/utils/test_numeric.py::test_random_subset_of_size_stochastic[10-3]": 0.0026626450126059353, + "tests/utils/test_numeric.py::test_random_subset_of_size_stochastic[1000-40]": 0.002802837989293039, + "tests/utils/test_numeric.py::test_running_moments": 0.6196783449850045, + "tests/utils/test_parallel.py::test_chunkification[joblib-data0-3-expected_chunks0]": 0.015510658000494004, + "tests/utils/test_parallel.py::test_chunkification[joblib-data1-2-expected_chunks1]": 0.012093620000086958, + "tests/utils/test_parallel.py::test_chunkification[joblib-data2-2-expected_chunks2]": 0.011375399999451474, + "tests/utils/test_parallel.py::test_chunkification[joblib-data3-3-expected_chunks3]": 0.016111063001517323, + "tests/utils/test_parallel.py::test_chunkification[joblib-data4-5-expected_chunks4]": 0.02149817300050927, + "tests/utils/test_parallel.py::test_chunkification[joblib-data5-42-expected_chunks5]": 0.013197087000662577, + "tests/utils/test_parallel.py::test_chunkification[joblib-data6-42-expected_chunks6]": 0.017662769996604766, + "tests/utils/test_parallel.py::test_chunkification[joblib-data7-4-expected_chunks7]": 0.013664767000591382, + "tests/utils/test_parallel.py::test_chunkification[joblib-data8-4-expected_chunks8]": 0.0129568249994918, + "tests/utils/test_parallel.py::test_chunkification[ray-external-data0-3-expected_chunks0]": 0.02873299299972132, + 
"tests/utils/test_parallel.py::test_chunkification[ray-external-data1-2-expected_chunks1]": 0.037400651001007645, + "tests/utils/test_parallel.py::test_chunkification[ray-external-data2-2-expected_chunks2]": 0.04821507099950395, + "tests/utils/test_parallel.py::test_chunkification[ray-external-data3-3-expected_chunks3]": 0.03959165199921699, + "tests/utils/test_parallel.py::test_chunkification[ray-external-data4-5-expected_chunks4]": 0.030608711000240874, + "tests/utils/test_parallel.py::test_chunkification[ray-external-data5-42-expected_chunks5]": 0.026263547000780818, + "tests/utils/test_parallel.py::test_chunkification[ray-external-data6-42-expected_chunks6]": 0.01923054399958346, + "tests/utils/test_parallel.py::test_chunkification[ray-external-data7-4-expected_chunks7]": 0.020033368999065715, + "tests/utils/test_parallel.py::test_chunkification[ray-external-data8-4-expected_chunks8]": 0.019113988000754034, + "tests/utils/test_parallel.py::test_chunkification[ray-local-data0-3-expected_chunks0]": 0.022260648998781107, + "tests/utils/test_parallel.py::test_chunkification[ray-local-data1-2-expected_chunks1]": 0.02477619599994796, + "tests/utils/test_parallel.py::test_chunkification[ray-local-data2-2-expected_chunks2]": 0.037821603000338655, + "tests/utils/test_parallel.py::test_chunkification[ray-local-data3-3-expected_chunks3]": 0.0276968880007189, + "tests/utils/test_parallel.py::test_chunkification[ray-local-data4-5-expected_chunks4]": 0.03822717000184639, + "tests/utils/test_parallel.py::test_chunkification[ray-local-data5-42-expected_chunks5]": 0.03200487200047064, + "tests/utils/test_parallel.py::test_chunkification[ray-local-data6-42-expected_chunks6]": 0.02251517100012279, + "tests/utils/test_parallel.py::test_chunkification[ray-local-data7-4-expected_chunks7]": 0.02549016900047718, + "tests/utils/test_parallel.py::test_chunkification[ray-local-data8-4-expected_chunks8]": 0.016007507998438086, + 
"tests/utils/test_parallel.py::test_effective_n_jobs[joblib]": 0.005121522000990808, + "tests/utils/test_parallel.py::test_effective_n_jobs[ray-external]": 4.8416320709984575, + "tests/utils/test_parallel.py::test_effective_n_jobs[ray-local]": 6.68878685799973, + "tests/utils/test_parallel.py::test_future_cancellation[joblib]": 0.013322050999704516, + "tests/utils/test_parallel.py::test_future_cancellation[ray-external]": 6.1742852379975375, + "tests/utils/test_parallel.py::test_future_cancellation[ray-local]": 5.196579726998607, + "tests/utils/test_parallel.py::test_futures_executor_map[joblib]": 2.7167825960004848, + "tests/utils/test_parallel.py::test_futures_executor_map[ray-external]": 0.10519307000140543, + "tests/utils/test_parallel.py::test_futures_executor_map[ray-local]": 0.10775902599925757, + "tests/utils/test_parallel.py::test_futures_executor_map_with_max_workers[joblib]": 0.012954608999280026, + "tests/utils/test_parallel.py::test_futures_executor_map_with_max_workers[ray-external]": 1.1045504180019634, + "tests/utils/test_parallel.py::test_futures_executor_map_with_max_workers[ray-local]": 1.100314563000211, + "tests/utils/test_parallel.py::test_futures_executor_submit[joblib]": 3.2937196319981012, + "tests/utils/test_parallel.py::test_futures_executor_submit[ray-external]": 0.06437306899897521, + "tests/utils/test_parallel.py::test_futures_executor_submit[ray-local]": 0.05545763400186843, + "tests/utils/test_parallel.py::test_map_reduce_job[joblib-1-list-indices0-expected0]": 0.0033702880009514047, + "tests/utils/test_parallel.py::test_map_reduce_job[joblib-1-list-indices1-expected1]": 0.003624205000960501, + "tests/utils/test_parallel.py::test_map_reduce_job[joblib-1-list-indices2-expected2]": 0.0034593179989315104, + "tests/utils/test_parallel.py::test_map_reduce_job[joblib-1-numpy-indices4-45]": 0.003431146000366425, + "tests/utils/test_parallel.py::test_map_reduce_job[joblib-1-range-indices3-expected3]": 0.003291076000095927, + 
"tests/utils/test_parallel.py::test_map_reduce_job[joblib-2-list-indices0-expected0]": 0.0043230089995631715, + "tests/utils/test_parallel.py::test_map_reduce_job[joblib-2-list-indices1-expected1]": 0.014759305000552558, + "tests/utils/test_parallel.py::test_map_reduce_job[joblib-2-list-indices2-expected2]": 0.014669898000647663, + "tests/utils/test_parallel.py::test_map_reduce_job[joblib-2-numpy-indices4-45]": 0.014518962998408824, + "tests/utils/test_parallel.py::test_map_reduce_job[joblib-2-range-indices3-expected3]": 0.014446292998400168, + "tests/utils/test_parallel.py::test_map_reduce_job[joblib-4-list-indices0-expected0]": 0.16248785400057386, + "tests/utils/test_parallel.py::test_map_reduce_job[joblib-4-list-indices1-expected1]": 2.277719737998268, + "tests/utils/test_parallel.py::test_map_reduce_job[joblib-4-list-indices2-expected2]": 3.347688416000892, + "tests/utils/test_parallel.py::test_map_reduce_job[joblib-4-numpy-indices4-45]": 0.04604001000188873, + "tests/utils/test_parallel.py::test_map_reduce_job[joblib-4-range-indices3-expected3]": 0.057255595000242465, + "tests/utils/test_parallel.py::test_map_reduce_job[ray-external-1-list-indices0-expected0]": 0.026082702997882734, + "tests/utils/test_parallel.py::test_map_reduce_job[ray-external-1-list-indices1-expected1]": 0.023299047999898903, + "tests/utils/test_parallel.py::test_map_reduce_job[ray-external-1-list-indices2-expected2]": 0.02191418300026271, + "tests/utils/test_parallel.py::test_map_reduce_job[ray-external-1-numpy-indices4-45]": 0.02673473200047738, + "tests/utils/test_parallel.py::test_map_reduce_job[ray-external-1-range-indices3-expected3]": 0.027526039999429486, + "tests/utils/test_parallel.py::test_map_reduce_job[ray-external-2-list-indices0-expected0]": 3.4228467769989948, + "tests/utils/test_parallel.py::test_map_reduce_job[ray-external-2-list-indices1-expected1]": 4.798353305001001, + "tests/utils/test_parallel.py::test_map_reduce_job[ray-external-2-list-indices2-expected2]": 
4.636959622999711, + "tests/utils/test_parallel.py::test_map_reduce_job[ray-external-2-numpy-indices4-45]": 4.028821964997405, + "tests/utils/test_parallel.py::test_map_reduce_job[ray-external-2-range-indices3-expected3]": 4.398552747999929, + "tests/utils/test_parallel.py::test_map_reduce_job[ray-external-4-list-indices0-expected0]": 3.734075545000451, + "tests/utils/test_parallel.py::test_map_reduce_job[ray-external-4-list-indices1-expected1]": 5.287959784998748, + "tests/utils/test_parallel.py::test_map_reduce_job[ray-external-4-list-indices2-expected2]": 6.245923890002814, + "tests/utils/test_parallel.py::test_map_reduce_job[ray-external-4-numpy-indices4-45]": 6.61028953999994, + "tests/utils/test_parallel.py::test_map_reduce_job[ray-external-4-range-indices3-expected3]": 6.340780258999075, + "tests/utils/test_parallel.py::test_map_reduce_job[ray-local-1-list-indices0-expected0]": 0.026392571999167558, + "tests/utils/test_parallel.py::test_map_reduce_job[ray-local-1-list-indices1-expected1]": 0.0228169030015124, + "tests/utils/test_parallel.py::test_map_reduce_job[ray-local-1-list-indices2-expected2]": 0.026224847002595197, + "tests/utils/test_parallel.py::test_map_reduce_job[ray-local-1-numpy-indices4-45]": 0.02119264299835777, + "tests/utils/test_parallel.py::test_map_reduce_job[ray-local-1-range-indices3-expected3]": 0.02678771700084326, + "tests/utils/test_parallel.py::test_map_reduce_job[ray-local-2-list-indices0-expected0]": 2.813331847997688, + "tests/utils/test_parallel.py::test_map_reduce_job[ray-local-2-list-indices1-expected1]": 4.129950463000569, + "tests/utils/test_parallel.py::test_map_reduce_job[ray-local-2-list-indices2-expected2]": 4.1853057150001405, + "tests/utils/test_parallel.py::test_map_reduce_job[ray-local-2-numpy-indices4-45]": 3.9139689650000946, + "tests/utils/test_parallel.py::test_map_reduce_job[ray-local-2-range-indices3-expected3]": 4.066097430000809, + 
"tests/utils/test_parallel.py::test_map_reduce_job[ray-local-4-list-indices0-expected0]": 3.626414754000507, + "tests/utils/test_parallel.py::test_map_reduce_job[ray-local-4-list-indices1-expected1]": 5.354816800998378, + "tests/utils/test_parallel.py::test_map_reduce_job[ray-local-4-list-indices2-expected2]": 6.589774920001219, + "tests/utils/test_parallel.py::test_map_reduce_job[ray-local-4-numpy-indices4-45]": 6.373054822000995, + "tests/utils/test_parallel.py::test_map_reduce_job[ray-local-4-range-indices3-expected3]": 6.71076984499814, + "tests/utils/test_parallel.py::test_map_reduce_job_partial_map_and_reduce_func[joblib]": 0.03710782099915377, + "tests/utils/test_parallel.py::test_map_reduce_job_partial_map_and_reduce_func[ray-external]": 6.3739082100000815, + "tests/utils/test_parallel.py::test_map_reduce_job_partial_map_and_reduce_func[ray-local]": 6.171818285998597, + "tests/utils/test_parallel.py::test_map_reduce_seeding[joblib-42-12]": 0.16202725999937684, + "tests/utils/test_parallel.py::test_map_reduce_seeding[ray-external-42-12]": 19.644846438999593, + "tests/utils/test_parallel.py::test_map_reduce_seeding[ray-local-42-12]": 19.494929903998127, + "tests/utils/test_parallel.py::test_wrap_function[joblib]": 0.010273419000441208, + "tests/utils/test_parallel.py::test_wrap_function[ray-external]": 4.3178896000026725, + "tests/utils/test_parallel.py::test_wrap_function[ray-local]": 4.386876819999088, + "tests/utils/test_score.py::test_compose_score": 0.002674737013876438, + "tests/utils/test_score.py::test_scorer": 0.0035687130002770573, + "tests/utils/test_score.py::test_squashed_r2": 0.0024596559815108776, + "tests/utils/test_score.py::test_squashed_variance": 0.0037596760084852576, + "tests/utils/test_status.py::test_and_status": 0.0021642329811584204, + "tests/utils/test_status.py::test_not_status": 0.0017621670267544687, + "tests/utils/test_status.py::test_or_status": 0.0020827190310228616, "tests/utils/test_utility.py::test_cache[2-0-8]": 
0.00677607100806199, - "tests/utils/test_utility.py::test_data_utility_learning_wrapper[10-2-0-8]": 0.004311377968406305, - "tests/utils/test_utility.py::test_data_utility_learning_wrapper[2-2-0-8]": 0.0040499519964214414, + "tests/utils/test_utility.py::test_data_utility_learning_wrapper[10-2-0-8]": 0.007034309994196519, + "tests/utils/test_utility.py::test_data_utility_learning_wrapper[2-2-0-8]": 0.01102408699807711, "tests/utils/test_utility.py::test_different_cache_signature[model_kwargs0-2-0-8]": 0.0038117940130177885, "tests/utils/test_utility.py::test_different_cache_signature[model_kwargs1-2-0-8]": 0.0034867670328821987, - "tests/utils/test_utility.py::test_utility_show_warnings[4-4-False]": 0.00734079402172938, - "tests/utils/test_utility.py::test_utility_show_warnings[4-4-True]": 0.007422954018693417, + "tests/utils/test_utility.py::test_different_utility_with_same_cache[2-0-8]": 0.00709219096461311, + "tests/utils/test_utility.py::test_utility_serialization[False-2-0-8]": 0.004958142031682655, + "tests/utils/test_utility.py::test_utility_serialization[True-2-0-8]": 0.004273276019375771, + "tests/utils/test_utility.py::test_utility_show_warnings[4-4-False]": 0.011898054013727233, + "tests/utils/test_utility.py::test_utility_show_warnings[4-4-True]": 0.011095394002040848, + "tests/utils/test_utility.py::test_utility_with_cache[2-0-8]": 0.010787420003907755, + "tests/value/least_core/test_common.py::test_lc_solve_problems[test_game0]": 0.18890138398273848, "tests/value/least_core/test_common.py::test_lc_solve_problems[test_utility0]": 3.0655845460132696, + "tests/value/least_core/test_montecarlo.py::test_montecarlo_least_core[False--1-test_game0-0.1-128]": 0.05052897802670486, + "tests/value/least_core/test_montecarlo.py::test_montecarlo_least_core[False--1-test_game1-0.2-10000]": 1.0599143210274633, "tests/value/least_core/test_montecarlo.py::test_montecarlo_least_core[False--1-test_utility0-0.1-128]": 0.05090764199849218, 
"tests/value/least_core/test_montecarlo.py::test_montecarlo_least_core[False--1-test_utility1-0.2-10000]": 0.39550038598827086, + "tests/value/least_core/test_montecarlo.py::test_montecarlo_least_core[False-1-test_game0-0.1-128]": 0.045558749028714374, + "tests/value/least_core/test_montecarlo.py::test_montecarlo_least_core[False-1-test_game1-0.2-10000]": 1.016920865047723, "tests/value/least_core/test_montecarlo.py::test_montecarlo_least_core[False-1-test_utility0-0.1-128]": 0.054777625045971945, "tests/value/least_core/test_montecarlo.py::test_montecarlo_least_core[False-1-test_utility1-0.2-10000]": 0.7125970929628238, + "tests/value/least_core/test_montecarlo.py::test_montecarlo_least_core[True--1-test_game0-0.1-128]": 0.048427555011585355, + "tests/value/least_core/test_montecarlo.py::test_montecarlo_least_core[True--1-test_game1-0.2-10000]": 1.0079815899953246, "tests/value/least_core/test_montecarlo.py::test_montecarlo_least_core[True--1-test_utility0-0.1-128]": 6.515727574034827, "tests/value/least_core/test_montecarlo.py::test_montecarlo_least_core[True--1-test_utility1-0.2-10000]": 0.6112625639943872, + "tests/value/least_core/test_montecarlo.py::test_montecarlo_least_core[True-1-test_game0-0.1-128]": 0.04923934198450297, + "tests/value/least_core/test_montecarlo.py::test_montecarlo_least_core[True-1-test_game1-0.2-10000]": 1.0299546059977729, "tests/value/least_core/test_montecarlo.py::test_montecarlo_least_core[True-1-test_utility0-0.1-128]": 0.07473104700329714, "tests/value/least_core/test_montecarlo.py::test_montecarlo_least_core[True-1-test_utility1-0.2-10000]": 0.7888634809933137, + "tests/value/least_core/test_naive.py::test_naive_least_core[False-test_game0]": 0.022803050029324368, + "tests/value/least_core/test_naive.py::test_naive_least_core[False-test_game1]": 0.024595678987680003, + "tests/value/least_core/test_naive.py::test_naive_least_core[False-test_game2]": 0.022240159974899143, + 
"tests/value/least_core/test_naive.py::test_naive_least_core[False-test_game3]": 0.021910707029746845, + "tests/value/least_core/test_naive.py::test_naive_least_core[False-test_game4]": 0.025111915019806474, "tests/value/least_core/test_naive.py::test_naive_least_core[False-test_utility0]": 0.024124946998199448, "tests/value/least_core/test_naive.py::test_naive_least_core[False-test_utility1]": 0.02425819096970372, "tests/value/least_core/test_naive.py::test_naive_least_core[False-test_utility2]": 0.023533977015176788, "tests/value/least_core/test_naive.py::test_naive_least_core[False-test_utility3]": 0.023558928980492055, "tests/value/least_core/test_naive.py::test_naive_least_core[False-test_utility4]": 0.024587185034761205, + "tests/value/least_core/test_naive.py::test_naive_least_core[True-test_game0]": 0.024778541992418468, + "tests/value/least_core/test_naive.py::test_naive_least_core[True-test_game1]": 0.02579708001576364, + "tests/value/least_core/test_naive.py::test_naive_least_core[True-test_game2]": 0.025394369004061446, + "tests/value/least_core/test_naive.py::test_naive_least_core[True-test_game3]": 0.023870316974353045, + "tests/value/least_core/test_naive.py::test_naive_least_core[True-test_game4]": 0.02718632298638113, "tests/value/least_core/test_naive.py::test_naive_least_core[True-test_utility0]": 0.025446541025303304, "tests/value/least_core/test_naive.py::test_naive_least_core[True-test_utility1]": 0.026494102989090607, "tests/value/least_core/test_naive.py::test_naive_least_core[True-test_utility2]": 0.02477889700094238, "tests/value/least_core/test_naive.py::test_naive_least_core[True-test_utility3]": 0.026450325007317588, "tests/value/least_core/test_naive.py::test_naive_least_core[True-test_utility4]": 0.026973432017257437, - "tests/value/loo/test_loo.py::test_loo[100]": 3.7793434759951197, - "tests/value/loo/test_loo.py::test_loo[10]": 3.8455980509752408, - 
"tests/value/shapley/test_classwise.py::test_classwise_scorer_accuracies_left_right_margins[101-0.3-0.4]": 0.004718418029369786, - "tests/value/shapley/test_classwise.py::test_classwise_scorer_accuracies_manual_derivation": 0.022209248010767624, - "tests/value/shapley/test_classwise.py::test_classwise_scorer_is_symmetric[101-0.3-0.4]": 0.0053302829910535365, - "tests/value/shapley/test_classwise.py::test_classwise_scorer_representation": 0.002573036035755649, - "tests/value/shapley/test_classwise.py::test_classwise_scorer_utility[101-0.3-0.4]": 0.00688477698713541, - "tests/value/shapley/test_classwise.py::test_classwise_shapley[classwise_shapley_exact_solution-n_resample_complement_sets=1-n_samples=500]": 6.088012945023365, - "tests/value/shapley/test_classwise.py::test_classwise_shapley[classwise_shapley_exact_solution_no_default-n_resample_complement_sets=1-n_samples=500]": 6.90557194603025, - "tests/value/shapley/test_classwise.py::test_classwise_shapley[classwise_shapley_exact_solution_no_default_allow_empty_set-n_resample_complement_sets=1-n_samples=500]": 6.456796451995615, - "tests/value/shapley/test_classwise.py::test_classwise_shapley[classwise_shapley_exact_solution_normalized-n_resample_complement_sets=1-n_samples=500]": 5.917300594970584, - "tests/value/shapley/test_classwise.py::test_closed_form_linear_classifier": 0.004191815009107813, - "tests/value/shapley/test_knn.py::test_knn_montecarlo_match": 6.380129672033945, + "tests/value/loo/test_loo.py::test_loo[100]": 2.931964471034007, + "tests/value/loo/test_loo.py::test_loo[10]": 2.289035618014168, + "tests/value/shapley/test_classwise.py::test_classwise_scorer_accuracies_left_right_margins[101-0.3-0.4]": 0.005037697002990171, + "tests/value/shapley/test_classwise.py::test_classwise_scorer_accuracies_manual_derivation": 0.037136607978027314, + "tests/value/shapley/test_classwise.py::test_classwise_scorer_is_symmetric[101-0.3-0.4]": 0.006254975975025445, + 
"tests/value/shapley/test_classwise.py::test_classwise_scorer_representation": 0.0036907130270265043, + "tests/value/shapley/test_classwise.py::test_classwise_scorer_utility[101-0.3-0.4]": 0.009000057965749875, + "tests/value/shapley/test_classwise.py::test_classwise_shapley[classwise_shapley_exact_solution-n_resample_complement_sets=1-n_samples=500]": 5.354776905995095, + "tests/value/shapley/test_classwise.py::test_classwise_shapley[classwise_shapley_exact_solution_no_default-n_resample_complement_sets=1-n_samples=500]": 6.330135180032812, + "tests/value/shapley/test_classwise.py::test_classwise_shapley[classwise_shapley_exact_solution_no_default_allow_empty_set-n_resample_complement_sets=1-n_samples=500]": 6.155792267003562, + "tests/value/shapley/test_classwise.py::test_classwise_shapley[classwise_shapley_exact_solution_normalized-n_resample_complement_sets=1-n_samples=500]": 5.430430012987927, + "tests/value/shapley/test_classwise.py::test_closed_form_linear_classifier": 0.005138550972333178, + "tests/value/shapley/test_knn.py::test_knn_montecarlo_match": 2.8276229689945467, "tests/value/shapley/test_montecarlo.py::test_analytic_montecarlo_shapley[12-owen-0.1-0.0001-kwargs2]": 0.6999966300209053, "tests/value/shapley/test_montecarlo.py::test_analytic_montecarlo_shapley[12-owen_antithetic-0.1-0.0001-kwargs3]": 1.3923712590476498, "tests/value/shapley/test_montecarlo.py::test_analytic_montecarlo_shapley[12-permutation_montecarlo-0.1-1e-05-kwargs0]": 4.533932764985366, "tests/value/shapley/test_montecarlo.py::test_analytic_montecarlo_shapley[3-group_testing-0.1-0.01-kwargs4]": 2.874565462989267, "tests/value/shapley/test_montecarlo.py::test_analytic_montecarlo_shapley[8-combinatorial_montecarlo-0.2-0.0001-kwargs1]": 4.175152084033471, - "tests/value/shapley/test_montecarlo.py::test_grouped_linear_montecarlo_shapley[permutation_montecarlo-kwargs0-scorer0-0.1-2-0-21-2]": 5.129105891013751, - 
"tests/value/shapley/test_montecarlo.py::test_hoeffding_bound_montecarlo[combinatorial_montecarlo-6-0.1-0.1]": 4.910673014004715, - "tests/value/shapley/test_montecarlo.py::test_hoeffding_bound_montecarlo[permutation_montecarlo-6-0.1-0.1]": 52.25644952899893, + "tests/value/shapley/test_montecarlo.py::test_games[combinatorial_montecarlo-0.2-0.0001-kwargs1-test_game0]": 1.9197496420238167, + "tests/value/shapley/test_montecarlo.py::test_games[combinatorial_montecarlo-0.2-0.0001-kwargs1-test_game1]": 2.5275338669889607, + "tests/value/shapley/test_montecarlo.py::test_games[group_testing-0.1-0.01-kwargs4-test_game0]": 5.158969943964621, + "tests/value/shapley/test_montecarlo.py::test_games[group_testing-0.1-0.01-kwargs4-test_game1]": 6.070634126022924, + "tests/value/shapley/test_montecarlo.py::test_games[owen-0.2-0.0001-kwargs2-test_game0]": 0.5380362030118704, + "tests/value/shapley/test_montecarlo.py::test_games[owen-0.2-0.0001-kwargs2-test_game1]": 0.8358890759991482, + "tests/value/shapley/test_montecarlo.py::test_games[owen_antithetic-0.1-0.0001-kwargs3-test_game0]": 1.2963983940135222, + "tests/value/shapley/test_montecarlo.py::test_games[owen_antithetic-0.1-0.0001-kwargs3-test_game1]": 2.021286355011398, + "tests/value/shapley/test_montecarlo.py::test_games[permutation_montecarlo-0.2-0.0001-kwargs0-test_game0]": 3.723641743970802, + "tests/value/shapley/test_montecarlo.py::test_games[permutation_montecarlo-0.2-0.0001-kwargs0-test_game1]": 3.8597429619985633, + "tests/value/shapley/test_montecarlo.py::test_grouped_linear_montecarlo_shapley[permutation_montecarlo-kwargs0-scorer0-0.1-2-0-21-2]": 5.106256189988926, + "tests/value/shapley/test_montecarlo.py::test_hoeffding_bound_montecarlo[combinatorial_montecarlo-6-0.1-0.1]": 0.001203357009217143, + "tests/value/shapley/test_montecarlo.py::test_hoeffding_bound_montecarlo[permutation_montecarlo-6-0.1-0.1]": 54.87148774100933, 
"tests/value/shapley/test_montecarlo.py::test_linear_montecarlo_shapley[combinatorial_montecarlo-kwargs1-scorer0-0.25-2-0-21]": 17.78464582102606, "tests/value/shapley/test_montecarlo.py::test_linear_montecarlo_shapley[group_testing-kwargs4-scorer0-0.25-2-0-21]": 29.239474696019897, "tests/value/shapley/test_montecarlo.py::test_linear_montecarlo_shapley[owen-kwargs2-scorer0-0.25-2-0-21]": 4.124498174991459, "tests/value/shapley/test_montecarlo.py::test_linear_montecarlo_shapley[owen_antithetic-kwargs3-scorer0-0.25-2-0-21]": 7.887545032019261, "tests/value/shapley/test_montecarlo.py::test_linear_montecarlo_shapley[permutation_montecarlo-kwargs0-scorer0-0.25-2-0-21]": 5.8485472809989005, - "tests/value/shapley/test_montecarlo.py::test_linear_montecarlo_with_outlier[group_testing-kwargs3-scorer0-0.2-2-0-21]": 30.232708652998554, - "tests/value/shapley/test_montecarlo.py::test_linear_montecarlo_with_outlier[owen-kwargs1-scorer0-0.2-2-0-21]": 13.355578221991891, - "tests/value/shapley/test_montecarlo.py::test_linear_montecarlo_with_outlier[owen_antithetic-kwargs2-scorer0-0.2-2-0-21]": 20.621750775026157, - "tests/value/shapley/test_montecarlo.py::test_linear_montecarlo_with_outlier[permutation_montecarlo-kwargs0-scorer0-0.2-2-0-21]": 5.888187222008128, + "tests/value/shapley/test_montecarlo.py::test_linear_montecarlo_with_outlier[group_testing-kwargs3-scorer0-0.2-2-0-21]": 99.55651604299783, + "tests/value/shapley/test_montecarlo.py::test_linear_montecarlo_with_outlier[owen-kwargs1-scorer0-0.2-2-0-21]": 34.54854124702979, + "tests/value/shapley/test_montecarlo.py::test_linear_montecarlo_with_outlier[owen_antithetic-kwargs2-scorer0-0.2-2-0-21]": 68.9907481589762, + "tests/value/shapley/test_montecarlo.py::test_linear_montecarlo_with_outlier[permutation_montecarlo-kwargs0-scorer0-0.2-2-0-21]": 6.065520199976163, "tests/value/shapley/test_montecarlo.py::test_montecarlo_shapley_housing_dataset[12-3-12-combinatorial_montecarlo-kwargs0]": 0.16786966001382098, 
"tests/value/shapley/test_montecarlo.py::test_montecarlo_shapley_housing_dataset[12-3-12-owen-kwargs1]": 17.011920137971174, "tests/value/shapley/test_montecarlo.py::test_montecarlo_shapley_housing_dataset[12-3-12-owen_antithetic-kwargs2]": 35.88025256394758, "tests/value/shapley/test_montecarlo.py::test_montecarlo_shapley_housing_dataset[12-3-4-group_testing-kwargs3]": 0.25901710899779573, + "tests/value/shapley/test_montecarlo.py::test_seed[combinatorial_montecarlo-kwargs0-test_game0]": 0.015400617005070671, + "tests/value/shapley/test_montecarlo.py::test_seed[group_testing-kwargs3-test_game0]": 0.4990526599576697, + "tests/value/shapley/test_montecarlo.py::test_seed[owen-kwargs1-test_game0]": 2.7092463899753056, + "tests/value/shapley/test_montecarlo.py::test_seed[owen_antithetic-kwargs2-test_game0]": 6.592550466011744, "tests/value/shapley/test_naive.py::test_analytic_exact_shapley[12-combinatorial_exact_shapley-0.01-1e-05]": 2.798590613005217, "tests/value/shapley/test_naive.py::test_analytic_exact_shapley[6-permutation_exact_shapley-0.01-1e-05]": 0.34537768000154756, - "tests/value/shapley/test_naive.py::test_grouped_linear[2-0-50-3-r2]": 0.057835308980429545, - "tests/value/shapley/test_naive.py::test_grouped_linear[2-1-100-5-explained_variance]": 1.2154581100330688, - "tests/value/shapley/test_naive.py::test_grouped_linear[2-1-100-5-r2]": 1.1950475970224943, + "tests/value/shapley/test_naive.py::test_games[combinatorial_exact_shapley-test_game0-0.1-1e-05]": 0.013095707981847227, + "tests/value/shapley/test_naive.py::test_games[combinatorial_exact_shapley-test_game1-0.1-1e-05]": 0.010749600012786686, + "tests/value/shapley/test_naive.py::test_games[combinatorial_exact_shapley-test_game2-0.1-1e-05]": 0.009951793035725132, + "tests/value/shapley/test_naive.py::test_games[combinatorial_exact_shapley-test_game3-0.1-1e-05]": 0.0090146989969071, + "tests/value/shapley/test_naive.py::test_games[combinatorial_exact_shapley-test_game4-0.1-1e-05]": 
0.03628412194666453, + "tests/value/shapley/test_naive.py::test_games[permutation_exact_shapley-test_game0-0.1-1e-05]": 0.017009111965307966, + "tests/value/shapley/test_naive.py::test_games[permutation_exact_shapley-test_game1-0.1-1e-05]": 0.010189941996941343, + "tests/value/shapley/test_naive.py::test_games[permutation_exact_shapley-test_game2-0.1-1e-05]": 0.01022218499565497, + "tests/value/shapley/test_naive.py::test_games[permutation_exact_shapley-test_game3-0.1-1e-05]": 0.009432713995920494, + "tests/value/shapley/test_naive.py::test_games[permutation_exact_shapley-test_game4-0.1-1e-05]": 0.5037095320003573, + "tests/value/shapley/test_naive.py::test_grouped_linear[2-0-50-3-r2]": 0.11044956598198041, + "tests/value/shapley/test_naive.py::test_grouped_linear[2-1-100-5-explained_variance]": 2.3892422189819627, + "tests/value/shapley/test_naive.py::test_grouped_linear[2-1-100-5-r2]": 2.3473940060066525, "tests/value/shapley/test_naive.py::test_linear[2-0-10-r2]": 0.05533879197901115, "tests/value/shapley/test_naive.py::test_linear[2-1-10-explained_variance]": 0.058987755968701094, "tests/value/shapley/test_naive.py::test_linear[2-1-10-neg_median_absolute_error]": 0.05515471697435714, "tests/value/shapley/test_naive.py::test_linear[2-1-10-r2]": 0.05683578198659234, - "tests/value/shapley/test_naive.py::test_linear_with_outlier[2-0-20-r2]": 7.4271527160017285, - "tests/value/shapley/test_naive.py::test_linear_with_outlier[2-1-20-explained_variance]": 7.752014733996475, - "tests/value/shapley/test_naive.py::test_linear_with_outlier[2-1-20-neg_median_absolute_error]": 7.2494586749817245, - "tests/value/shapley/test_naive.py::test_linear_with_outlier[2-1-20-r2]": 7.528596303978702, - "tests/value/shapley/test_naive.py::test_polynomial[coefficients0-r2]": 0.10091358600766398, - "tests/value/shapley/test_naive.py::test_polynomial[coefficients1-neg_median_absolute_error]": 0.09756919997744262, - 
"tests/value/shapley/test_naive.py::test_polynomial[coefficients2-explained_variance]": 0.10092617000918835, - "tests/value/shapley/test_naive.py::test_polynomial_with_outlier[coefficients0-r2]": 0.05707916300161742, - "tests/value/shapley/test_naive.py::test_polynomial_with_outlier[coefficients1-neg_median_absolute_error]": 0.058802402985747904, - "tests/value/shapley/test_naive.py::test_polynomial_with_outlier[coefficients2-explained_variance]": 0.06408755297889002, + "tests/value/shapley/test_naive.py::test_linear_with_outlier[2-0-20-r2]": 15.295730330020888, + "tests/value/shapley/test_naive.py::test_linear_with_outlier[2-1-20-explained_variance]": 15.311946107976837, + "tests/value/shapley/test_naive.py::test_linear_with_outlier[2-1-20-neg_median_absolute_error]": 15.246478994027711, + "tests/value/shapley/test_naive.py::test_linear_with_outlier[2-1-20-r2]": 15.122833114990499, + "tests/value/shapley/test_naive.py::test_polynomial[coefficients0-r2]": 0.12686681901686825, + "tests/value/shapley/test_naive.py::test_polynomial[coefficients1-neg_median_absolute_error]": 0.11067792598623782, + "tests/value/shapley/test_naive.py::test_polynomial[coefficients2-explained_variance]": 0.11747662501875311, + "tests/value/shapley/test_naive.py::test_polynomial_with_outlier[coefficients0-r2]": 0.08565683299093507, + "tests/value/shapley/test_naive.py::test_polynomial_with_outlier[coefficients1-neg_median_absolute_error]": 0.08773901200038381, + "tests/value/shapley/test_naive.py::test_polynomial_with_outlier[coefficients2-explained_variance]": 0.09032825799658895, + "tests/value/shapley/test_truncated.py::test_games[done0-NoTruncation-truncation_kwargs0-test_game0]": 3.9726052110199817, + "tests/value/shapley/test_truncated.py::test_games[done0-NoTruncation-truncation_kwargs0-test_game1]": 4.48901929197018, + "tests/value/shapley/test_truncated.py::test_games[done1-FixedTruncation-truncation_kwargs1-test_game0]": 4.009335360024124, + 
"tests/value/shapley/test_truncated.py::test_games[done1-FixedTruncation-truncation_kwargs1-test_game1]": 4.109410956996726, "tests/value/shapley/test_truncated.py::test_tmcs_analytic_montecarlo_shapley[12-truncated_montecarlo-0.1-1e-05-kwargs0]": 5.025441929989029, "tests/value/shapley/test_truncated.py::test_tmcs_linear_montecarlo_shapley[truncated_montecarlo-kwargs0-scorer0-0.25-2-0-21]": 5.633914494974306, - "tests/value/shapley/test_truncated.py::test_tmcs_linear_montecarlo_with_outlier[truncated_montecarlo-kwargs0-scorer0-0.2-2-0-21]": 3.523623990971828, - "tests/value/test_sampler.py::test_chunkify[AntitheticSampler]": 0.0012030639918521047, - "tests/value/test_sampler.py::test_chunkify[DeterministicUniformSampler]": 0.0011419990041758865, - "tests/value/test_sampler.py::test_chunkify[RandomHierarchicalSampler]": 0.0011900250101462007, - "tests/value/test_sampler.py::test_chunkify[UniformSampler]": 0.0013321389851626009, - "tests/value/test_sampler.py::test_chunkify_permutation[DeterministicPermutationSampler]": 0.0010862670314963907, - "tests/value/test_sampler.py::test_chunkify_permutation[PermutationSampler]": 0.001125522016081959, - "tests/value/test_sampler.py::test_proper[indices0-AntitheticSampler]": 0.0011964229634031653, - "tests/value/test_sampler.py::test_proper[indices0-DeterministicPermutationSampler]": 0.0013584279513452202, - "tests/value/test_sampler.py::test_proper[indices0-DeterministicUniformSampler]": 0.0013845030043739825, - "tests/value/test_sampler.py::test_proper[indices0-PermutationSampler]": 0.0012692750024143606, - "tests/value/test_sampler.py::test_proper[indices0-RandomHierarchicalSampler]": 0.0011780599888879806, - "tests/value/test_sampler.py::test_proper[indices0-UniformSampler]": 0.0012423349835444242, - "tests/value/test_sampler.py::test_proper[indices1-AntitheticSampler]": 0.001568679028423503, - "tests/value/test_sampler.py::test_proper[indices1-DeterministicPermutationSampler]": 0.0013892220158595592, - 
"tests/value/test_sampler.py::test_proper[indices1-DeterministicUniformSampler]": 0.0014415960176847875, - "tests/value/test_sampler.py::test_proper[indices1-PermutationSampler]": 0.0012552720145322382, - "tests/value/test_sampler.py::test_proper[indices1-RandomHierarchicalSampler]": 0.0017029709706548601, - "tests/value/test_sampler.py::test_proper[indices1-UniformSampler]": 0.0015911830123513937, - "tests/value/test_sampler.py::test_proper_reproducible[indices0-AntitheticSampler]": 0.0014955719816498458, - "tests/value/test_sampler.py::test_proper_reproducible[indices0-PermutationSampler]": 0.0017780059715732932, - "tests/value/test_sampler.py::test_proper_reproducible[indices0-RandomHierarchicalSampler]": 0.0015286150155588984, - "tests/value/test_sampler.py::test_proper_reproducible[indices0-UniformSampler]": 0.0013392769906204194, - "tests/value/test_sampler.py::test_proper_reproducible[indices1-AntitheticSampler]": 0.005814862961415201, - "tests/value/test_sampler.py::test_proper_reproducible[indices1-PermutationSampler]": 0.0022604400001000613, - "tests/value/test_sampler.py::test_proper_reproducible[indices1-RandomHierarchicalSampler]": 0.01281771101639606, - "tests/value/test_sampler.py::test_proper_reproducible[indices1-UniformSampler]": 0.006939170008990914, - "tests/value/test_sampler.py::test_proper_stochastic[indices0-AntitheticSampler]": 0.001301849988522008, - "tests/value/test_sampler.py::test_proper_stochastic[indices0-PermutationSampler]": 0.0013378779985941947, - "tests/value/test_sampler.py::test_proper_stochastic[indices0-RandomHierarchicalSampler]": 0.0014513320056721568, - "tests/value/test_sampler.py::test_proper_stochastic[indices0-UniformSampler]": 0.0014353079604916275, - "tests/value/test_sampler.py::test_proper_stochastic[indices1-AntitheticSampler]": 0.006029498006682843, - "tests/value/test_sampler.py::test_proper_stochastic[indices1-PermutationSampler]": 0.0019644349522423, - 
"tests/value/test_sampler.py::test_proper_stochastic[indices1-RandomHierarchicalSampler]": 0.012361108005279675, - "tests/value/test_sampler.py::test_proper_stochastic[indices1-UniformSampler]": 0.006347205984639004, - "tests/value/test_semivalues.py::test_banzhaf[AntitheticPermutationSampler-5]": 10.714197647990659, - "tests/value/test_semivalues.py::test_banzhaf[AntitheticSampler-5]": 4.695468286023242, - "tests/value/test_semivalues.py::test_banzhaf[DeterministicPermutationSampler-5]": 6.074063064996153, - "tests/value/test_semivalues.py::test_banzhaf[DeterministicUniformSampler-5]": 4.212341544014635, - "tests/value/test_semivalues.py::test_banzhaf[PermutationSampler-5]": 8.149094285006868, - "tests/value/test_semivalues.py::test_banzhaf[UniformSampler-5]": 4.764893947984092, - "tests/value/test_semivalues.py::test_coefficients[banzhaf_coefficient-100]": 0.003842581994831562, - "tests/value/test_semivalues.py::test_coefficients[banzhaf_coefficient-10]": 0.0032151709601748735, - "tests/value/test_semivalues.py::test_coefficients[beta_coefficient_w0-100]": 0.004444399964995682, - "tests/value/test_semivalues.py::test_coefficients[beta_coefficient_w0-10]": 0.003756532969418913, - "tests/value/test_semivalues.py::test_coefficients[beta_coefficient_w1-100]": 0.004344976012362167, - "tests/value/test_semivalues.py::test_coefficients[beta_coefficient_w1-10]": 0.003551592002622783, - "tests/value/test_semivalues.py::test_coefficients[beta_coefficient_w2-100]": 0.004556107014650479, - "tests/value/test_semivalues.py::test_coefficients[beta_coefficient_w2-10]": 0.0035066070267930627, - "tests/value/test_semivalues.py::test_coefficients[shapley_coefficient-100]": 0.0047601540281903, - "tests/value/test_semivalues.py::test_coefficients[shapley_coefficient-10]": 0.0030498180130962282, + "tests/value/shapley/test_truncated.py::test_tmcs_linear_montecarlo_with_outlier[truncated_montecarlo-kwargs0-scorer0-0.2-2-0-21]": 3.0232558990246616, + 
"tests/value/test_sampler.py::test_chunkify[AntitheticSampler]": 0.002352299023186788, + "tests/value/test_sampler.py::test_chunkify[DeterministicUniformSampler]": 0.002171223022742197, + "tests/value/test_sampler.py::test_chunkify[RandomHierarchicalSampler]": 0.0020702430338133126, + "tests/value/test_sampler.py::test_chunkify[UniformSampler]": 0.0028199770022183657, + "tests/value/test_sampler.py::test_chunkify_permutation[DeterministicPermutationSampler]": 0.002369680005358532, + "tests/value/test_sampler.py::test_chunkify_permutation[PermutationSampler]": 0.002037067955825478, + "tests/value/test_sampler.py::test_proper[indices0-AntitheticSampler]": 0.002861024026060477, + "tests/value/test_sampler.py::test_proper[indices0-DeterministicPermutationSampler]": 0.002655454009072855, + "tests/value/test_sampler.py::test_proper[indices0-DeterministicUniformSampler]": 0.003174368990585208, + "tests/value/test_sampler.py::test_proper[indices0-PermutationSampler]": 0.0024646950187161565, + "tests/value/test_sampler.py::test_proper[indices0-RandomHierarchicalSampler]": 0.0023855049803387374, + "tests/value/test_sampler.py::test_proper[indices0-UniformSampler]": 0.0028759060078300536, + "tests/value/test_sampler.py::test_proper[indices1-AntitheticSampler]": 0.003161819011438638, + "tests/value/test_sampler.py::test_proper[indices1-DeterministicPermutationSampler]": 0.002913616015575826, + "tests/value/test_sampler.py::test_proper[indices1-DeterministicUniformSampler]": 0.00358530500670895, + "tests/value/test_sampler.py::test_proper[indices1-PermutationSampler]": 0.0029439219797495753, + "tests/value/test_sampler.py::test_proper[indices1-RandomHierarchicalSampler]": 0.004866360017331317, + "tests/value/test_sampler.py::test_proper[indices1-UniformSampler]": 0.003348411002662033, + "tests/value/test_sampler.py::test_proper_reproducible[indices0-AntitheticSampler]": 0.0021045640169177204, + 
"tests/value/test_sampler.py::test_proper_reproducible[indices0-PermutationSampler]": 0.002255333965877071, + "tests/value/test_sampler.py::test_proper_reproducible[indices0-RandomHierarchicalSampler]": 0.002645997970830649, + "tests/value/test_sampler.py::test_proper_reproducible[indices0-UniformSampler]": 0.002439948031678796, + "tests/value/test_sampler.py::test_proper_reproducible[indices1-AntitheticSampler]": 0.012846849975176156, + "tests/value/test_sampler.py::test_proper_reproducible[indices1-PermutationSampler]": 0.0033622210030443966, + "tests/value/test_sampler.py::test_proper_reproducible[indices1-RandomHierarchicalSampler]": 0.016803934995550662, + "tests/value/test_sampler.py::test_proper_reproducible[indices1-UniformSampler]": 0.009853368013864383, + "tests/value/test_sampler.py::test_proper_stochastic[indices0-AntitheticSampler]": 0.002318738988833502, + "tests/value/test_sampler.py::test_proper_stochastic[indices0-PermutationSampler]": 0.0024791909963823855, + "tests/value/test_sampler.py::test_proper_stochastic[indices0-RandomHierarchicalSampler]": 0.0026730689860414714, + "tests/value/test_sampler.py::test_proper_stochastic[indices0-UniformSampler]": 0.0024485259782522917, + "tests/value/test_sampler.py::test_proper_stochastic[indices1-AntitheticSampler]": 0.011456050968263298, + "tests/value/test_sampler.py::test_proper_stochastic[indices1-PermutationSampler]": 0.004046668007504195, + "tests/value/test_sampler.py::test_proper_stochastic[indices1-RandomHierarchicalSampler]": 0.017887227004393935, + "tests/value/test_sampler.py::test_proper_stochastic[indices1-UniformSampler]": 0.013727245997870341, + "tests/value/test_semivalues.py::test_banzhaf[AntitheticPermutationSampler-5]": 8.8424951180059, + "tests/value/test_semivalues.py::test_banzhaf[AntitheticSampler-5]": 3.934585579962004, + "tests/value/test_semivalues.py::test_banzhaf[DeterministicPermutationSampler-5]": 4.930636252975091, + 
"tests/value/test_semivalues.py::test_banzhaf[DeterministicUniformSampler-5]": 3.3139901740069035, + "tests/value/test_semivalues.py::test_banzhaf[PermutationSampler-5]": 7.1331721209862735, + "tests/value/test_semivalues.py::test_banzhaf[UniformSampler-5]": 3.9336799690208863, + "tests/value/test_semivalues.py::test_coefficients[banzhaf_coefficient-100]": 0.004235609987517819, + "tests/value/test_semivalues.py::test_coefficients[banzhaf_coefficient-10]": 0.0035420969943515956, + "tests/value/test_semivalues.py::test_coefficients[beta_coefficient_w0-100]": 0.004731877997983247, + "tests/value/test_semivalues.py::test_coefficients[beta_coefficient_w0-10]": 0.004268383985618129, + "tests/value/test_semivalues.py::test_coefficients[beta_coefficient_w1-100]": 0.00397408299613744, + "tests/value/test_semivalues.py::test_coefficients[beta_coefficient_w1-10]": 0.0036294079618528485, + "tests/value/test_semivalues.py::test_coefficients[beta_coefficient_w2-100]": 0.004120993980905041, + "tests/value/test_semivalues.py::test_coefficients[beta_coefficient_w2-10]": 0.003885133017320186, + "tests/value/test_semivalues.py::test_coefficients[shapley_coefficient-100]": 0.004692697984864935, + "tests/value/test_semivalues.py::test_coefficients[shapley_coefficient-10]": 0.0032941289828158915, + "tests/value/test_semivalues.py::test_games_shapley[beta_coefficient_w-AntitheticPermutationSampler-test_game0]": 10.938828482991084, + "tests/value/test_semivalues.py::test_games_shapley[beta_coefficient_w-AntitheticPermutationSampler-test_game1]": 9.457341374014504, + "tests/value/test_semivalues.py::test_games_shapley[beta_coefficient_w-AntitheticSampler-test_game0]": 11.300962019013241, + "tests/value/test_semivalues.py::test_games_shapley[beta_coefficient_w-AntitheticSampler-test_game1]": 9.835019734979142, + "tests/value/test_semivalues.py::test_games_shapley[beta_coefficient_w-PermutationSampler-test_game0]": 10.733634837990394, + 
"tests/value/test_semivalues.py::test_games_shapley[beta_coefficient_w-PermutationSampler-test_game1]": 9.321977876999881, + "tests/value/test_semivalues.py::test_games_shapley[beta_coefficient_w-UniformSampler-test_game0]": 11.094729618023848, + "tests/value/test_semivalues.py::test_games_shapley[beta_coefficient_w-UniformSampler-test_game1]": 9.7448798789992, + "tests/value/test_semivalues.py::test_games_shapley[shapley_coefficient-AntitheticPermutationSampler-test_game0]": 7.972334994003177, + "tests/value/test_semivalues.py::test_games_shapley[shapley_coefficient-AntitheticPermutationSampler-test_game1]": 6.824364921019878, + "tests/value/test_semivalues.py::test_games_shapley[shapley_coefficient-AntitheticSampler-test_game0]": 8.056306425016373, + "tests/value/test_semivalues.py::test_games_shapley[shapley_coefficient-AntitheticSampler-test_game1]": 7.130653911037371, + "tests/value/test_semivalues.py::test_games_shapley[shapley_coefficient-PermutationSampler-test_game0]": 7.846854715025984, + "tests/value/test_semivalues.py::test_games_shapley[shapley_coefficient-PermutationSampler-test_game1]": 6.972198699979344, + "tests/value/test_semivalues.py::test_games_shapley[shapley_coefficient-UniformSampler-test_game0]": 8.307511443999829, + "tests/value/test_semivalues.py::test_games_shapley[shapley_coefficient-UniformSampler-test_game1]": 8.480023446987616, + "tests/value/test_semivalues.py::test_games_shapley_deterministic[beta_coefficient_w-DeterministicPermutationSampler-test_game0]": 3.3534141020209063, + "tests/value/test_semivalues.py::test_games_shapley_deterministic[beta_coefficient_w-DeterministicPermutationSampler-test_game1]": 2.9377814330218825, + "tests/value/test_semivalues.py::test_games_shapley_deterministic[beta_coefficient_w-DeterministicPermutationSampler-test_game2]": 3.0197440950141754, + "tests/value/test_semivalues.py::test_games_shapley_deterministic[beta_coefficient_w-DeterministicPermutationSampler-test_game3]": 3.065462252998259, + 
"tests/value/test_semivalues.py::test_games_shapley_deterministic[beta_coefficient_w-DeterministicUniformSampler-test_game0]": 2.530866265995428, + "tests/value/test_semivalues.py::test_games_shapley_deterministic[beta_coefficient_w-DeterministicUniformSampler-test_game1]": 2.936823854019167, + "tests/value/test_semivalues.py::test_games_shapley_deterministic[beta_coefficient_w-DeterministicUniformSampler-test_game2]": 3.0397010059969034, + "tests/value/test_semivalues.py::test_games_shapley_deterministic[beta_coefficient_w-DeterministicUniformSampler-test_game3]": 2.9198287760373205, + "tests/value/test_semivalues.py::test_games_shapley_deterministic[shapley_coefficient-DeterministicPermutationSampler-test_game0]": 2.0015553509874735, + "tests/value/test_semivalues.py::test_games_shapley_deterministic[shapley_coefficient-DeterministicPermutationSampler-test_game1]": 3.1432031750155147, + "tests/value/test_semivalues.py::test_games_shapley_deterministic[shapley_coefficient-DeterministicPermutationSampler-test_game2]": 2.8129150270251557, + "tests/value/test_semivalues.py::test_games_shapley_deterministic[shapley_coefficient-DeterministicPermutationSampler-test_game3]": 3.019237435044488, + "tests/value/test_semivalues.py::test_games_shapley_deterministic[shapley_coefficient-DeterministicUniformSampler-test_game0]": 2.2434276760031935, + "tests/value/test_semivalues.py::test_games_shapley_deterministic[shapley_coefficient-DeterministicUniformSampler-test_game1]": 3.035445230983896, + "tests/value/test_semivalues.py::test_games_shapley_deterministic[shapley_coefficient-DeterministicUniformSampler-test_game2]": 2.9089672839909326, + "tests/value/test_semivalues.py::test_games_shapley_deterministic[shapley_coefficient-DeterministicUniformSampler-test_game3]": 3.0945032040181104, + "tests/value/test_semivalues.py::test_marginal_batch_size[PermutationSampler-beta_coefficient_w-5-test_game0]": 0.006125468004029244, 
"tests/value/test_semivalues.py::test_shapley[beta_coefficient_w-AntitheticPermutationSampler-5]": 5.1298250389809255, "tests/value/test_semivalues.py::test_shapley[beta_coefficient_w-AntitheticSampler-5]": 21.97495059997891, "tests/value/test_semivalues.py::test_shapley[beta_coefficient_w-DeterministicPermutationSampler-5]": 5.294114143965999, @@ -460,18 +969,20 @@ "tests/value/test_semivalues.py::test_shapley[shapley_coefficient-DeterministicUniformSampler-5]": 3.263753114035353, "tests/value/test_semivalues.py::test_shapley[shapley_coefficient-PermutationSampler-5]": 4.766259174008155, "tests/value/test_semivalues.py::test_shapley[shapley_coefficient-UniformSampler-5]": 8.919797526003094, + "tests/value/test_semivalues.py::test_shapley_batch_size[1-PermutationSampler-beta_coefficient_w-5-test_game0]": 5.223603894002736, + "tests/value/test_semivalues.py::test_shapley_batch_size[2-PermutationSampler-beta_coefficient_w-5-test_game0]": 5.565272415027721, "tests/value/test_semivalues.py::test_shapley_batch_size[5-PermutationSampler-beta_coefficient_w-5]": 9.19877936199191, - "tests/value/test_stopping.py::test_history_deviation[0.01-100]": 0.7586702810076531, - "tests/value/test_stopping.py::test_history_deviation[0.01-1]": 0.01646678801625967, - "tests/value/test_stopping.py::test_history_deviation[0.01-42]": 0.35505866500898264, - "tests/value/test_stopping.py::test_history_deviation[0.05-100]": 0.15892104100203142, - "tests/value/test_stopping.py::test_history_deviation[0.05-1]": 0.003904131968738511, - "tests/value/test_stopping.py::test_history_deviation[0.05-42]": 0.06365110300248489, - "tests/value/test_stopping.py::test_make_criterion": 0.0067943750182166696, - "tests/value/test_stopping.py::test_max_checks": 0.0022287879837676883, - "tests/value/test_stopping.py::test_max_time": 0.30431480798870325, - "tests/value/test_stopping.py::test_minmax_updates": 0.003805230953730643, - "tests/value/test_stopping.py::test_standard_error": 0.003371614031493664, - 
"tests/value/test_stopping.py::test_stopping_criterion": 0.004461375967366621, - "tests/value/test_stopping.py::test_stopping_criterion_composition": 0.007468684023479 + "tests/value/test_stopping.py::test_history_deviation[0.01-100]": 0.9386799350031652, + "tests/value/test_stopping.py::test_history_deviation[0.01-1]": 0.016687404015101492, + "tests/value/test_stopping.py::test_history_deviation[0.01-42]": 0.34465136696235277, + "tests/value/test_stopping.py::test_history_deviation[0.05-100]": 0.197514128027251, + "tests/value/test_stopping.py::test_history_deviation[0.05-1]": 0.005374073953134939, + "tests/value/test_stopping.py::test_history_deviation[0.05-42]": 0.08412943602888845, + "tests/value/test_stopping.py::test_make_criterion": 0.005493109958479181, + "tests/value/test_stopping.py::test_max_checks": 0.0034617370110936463, + "tests/value/test_stopping.py::test_max_time": 0.30653654897469096, + "tests/value/test_stopping.py::test_minmax_updates": 0.0035910180013161153, + "tests/value/test_stopping.py::test_standard_error": 0.00415661497390829, + "tests/value/test_stopping.py::test_stopping_criterion": 0.0028504370129667222, + "tests/value/test_stopping.py::test_stopping_criterion_composition": 0.005712636018870398 } \ No newline at end of file diff --git a/CHANGELOG.md b/CHANGELOG.md index afe61ac67..a9cd7bb29 100644 --- a/CHANGELOG.md +++ b/CHANGELOG.md @@ -1,17 +1,68 @@ # Changelog -## Unreleased +## Unreleased 🆕 New influence method, better docs and bugfixes 📚🐞 + +### Added + +- New method: `NystroemSketchInfluence` + [PR #504](https://github.com/aai-institute/pyDVL/pull/504) +- New preconditioned block variant of conjugate gradient + [PR #507](https://github.com/aai-institute/pyDVL/pull/507) +- Improvements to documentation: fixes, links, text, example gallery, LFS and + more. 
[PR #532](https://github.com/aai-institute/pyDVL/pull/532),
+  [PR #543](https://github.com/aai-institute/pyDVL/pull/543)
+- Glossary of data valuation and influence terms in the documentation
+  [PR #537](https://github.com/aai-institute/pyDVL/pull/537)
 
 ### Fixed
 
-- Bug in using `DaskInfluenceCalcualator` with `TorchnumpyConverter`
-  for single dimensional arrays [PR #485](https://github.com/aai-institute/pyDVL/pull/485)
-- Fix implementations of `to` methods of `TorchInfluenceFunctionModel` implementations
-  [PR #487](https://github.com/aai-institute/pyDVL/pull/487)
+- Bug in `LissaInfluence`, when not using CPU device
+  [PR #495](https://github.com/aai-institute/pyDVL/pull/495)
+- Memory issue with `CgInfluence` and `ArnoldiInfluence`
+  [PR #498](https://github.com/aai-institute/pyDVL/pull/498)
+- Raising specific error message with install instruction, when trying to load
+  `pydvl.utils.cache.memcached` without `pymemcache` installed.
+  If `pymemcache` is available, all symbols from `pydvl.utils.cache.memcached`
+  are available through `pydvl.utils.cache`
+  [PR #509](https://github.com/aai-institute/pyDVL/pull/509)
+
+### Changed
+
+- Add property `model_dtype` to instances of type `TorchInfluenceFunctionModel`
+- Bump versions of CI actions to avoid warnings
+  [PR #502](https://github.com/aai-institute/pyDVL/pull/502)
+- Add Python Version 3.11 to supported versions
+  [PR #510](https://github.com/aai-institute/pyDVL/pull/510)
+- Documentation improvements and cleanup
+  [PR #521](https://github.com/aai-institute/pyDVL/pull/521),
+  [PR #522](https://github.com/aai-institute/pyDVL/pull/522)
+
+## 0.8.1 - 🆕 🏗 New method and notebook, Games with exact shapley values, bug fixes and cleanup
+
+### Added
+
 - Implement new method: `EkfacInfluence`
-  [PR #476](https://github.com/aai-institute/pyDVL/pull/476)
+  [PR #451](https://github.com/aai-institute/pyDVL/issues/451)
 - New notebook to showcase ekfac for LLMs
   [PR #483](https://github.com/aai-institute/pyDVL/pull/483)
+- 
Implemented exact games in Castro et al. 2009 and 2017 + [PR #341](https://github.com/appliedAI-Initiative/pyDVL/pull/341) + +### Fixed + +- Bug in using `DaskInfluenceCalcualator` with `TorchnumpyConverter` + for single dimensional arrays + [PR #485](https://github.com/aai-institute/pyDVL/pull/485) +- Fix implementations of `to` methods of `TorchInfluenceFunctionModel` + implementations [PR #487](https://github.com/aai-institute/pyDVL/pull/487) +- Fixed bug with checking for converged values in semivalues + [PR #341](https://github.com/appliedAI-Initiative/pyDVL/pull/341) + +### Changed + +- Add applications of data valuation section, display examples more prominently, + make all sections visible in table of contents, use mkdocs material cards + in the home page [PR #492](https://github.com/aai-institute/pyDVL/pull/492) ## 0.8.0 - 🆕 New interfaces, scaling computation, bug fixes and improvements 🎁 @@ -22,7 +73,8 @@ - New influence function interface `InfluenceFunctionModel` - Data parallel computation with `DaskInfluenceCalculator` [PR #26](https://github.com/aai-institute/pyDVL/issues/26) -- Sequential batch-wise computation and write to disk with `SequentialInfluenceCalculator` +- Sequential batch-wise computation and write to disk with + `SequentialInfluenceCalculator` [PR #377](https://github.com/aai-institute/pyDVL/issues/377) - Adapt notebooks to new influence abstractions [PR #430](https://github.com/aai-institute/pyDVL/issues/430) @@ -62,6 +114,11 @@ - Faster semi-value computation with per-index check of stopping criteria (optional) [PR #437](https://github.com/aai-institute/pyDVL/pull/437) +### Fixed + +- Fix initialization of `data_names` in `ValuationResult.zeros()` + [PR #443](https://github.com/aai-institute/pyDVL/pull/443) + ### Changed - No longer using docker within tests to start a memcached server @@ -73,12 +130,6 @@ - Refactoring of parallel module. 
Old imports will stop working in v0.9.0 [PR #421](https://github.com/aai-institute/pyDVL/pull/421) -### Fixed - -- Fix initialization of `data_names` in `ValuationResult.zeros()` - [PR #443](https://github.com/aai-institute/pyDVL/pull/443) - - ## 0.7.0 - 📚🆕 Documentation and IF overhaul, new methods and bug fixes 💥🐞 This is our first β release! We have worked hard to deliver improvements across diff --git a/CONTRIBUTING.md b/CONTRIBUTING.md index 198c7ded3..d2f47395f 100644 --- a/CONTRIBUTING.md +++ b/CONTRIBUTING.md @@ -11,7 +11,7 @@ improvements to the currently implemented methods and other ideas. Please open a ticket with yours. If you are interested in setting up a similar project, consider the template -[pymetrius](https://github.com/aai-institute/pymetrius). +[pymetrius](https://github.com/appliedAI-Initiative/pymetrius). ## Local development @@ -23,16 +23,22 @@ to make your life easier. Run the following to set up the pre-commit git hook to run before pushes: -```shell script +```shell pre-commit install --hook-type pre-push ``` +Additionally, we use Git LFS for some files like images. Install with + +```shell +git lfs install +``` + ## Setting up your environment We strongly suggest using some form of virtual environment for working with the library. E.g. with venv: -```shell script +```shell python -m venv ./venv . venv/bin/activate # `venv\Scripts\activate` in windows pip install -r requirements-dev.txt -r requirements-docs.txt @@ -40,7 +46,7 @@ pip install -r requirements-dev.txt -r requirements-docs.txt With conda: -```shell script +```shell conda create -n pydvl python=3.8 conda activate pydvl pip install -r requirements-dev.txt -r requirements-docs.txt @@ -49,16 +55,20 @@ pip install -r requirements-dev.txt -r requirements-docs.txt A very convenient way of working with your library during development is to install it in editable mode into your environment by running -```shell script +```shell pip install -e . 
``` In order to build the documentation locally (which is done as part of the tox -suite) [pandoc](https://pandoc.org/) is required. Except for OSX, it should be installed -automatically as a dependency with `requirements-docs.txt`. Under OSX you can -install pandoc (you'll need at least version 2.11) with: +suite) you need to install additional non-python dependencies as described in +the [documentation of mkdocs-material](https://squidfunk.github.io/mkdocs-material/plugins/requirements/image-processing/#cairo-graphics). -```shell script +In addition, [pandoc](https://pandoc.org/) is required. Except for OSX, +it should be installed automatically as a dependency with +`requirements-docs.txt`. Under OSX you can install pandoc +(you'll need at least version 2.11) with: + +```shell brew install pandoc ``` @@ -152,11 +162,11 @@ Two important markers are: To test the notebooks separately, run (see [below](#notebooks) for details): ```shell -tox -e tests -- notebooks/ +tox -e notebook-tests ``` To create a package locally, run: -```shell script +```shell python setup.py sdist bdist_wheel ``` @@ -239,12 +249,17 @@ This applies a simple CSS-filter to the output image of the cell. ## Documentation API documentation and examples from notebooks are built with -[mkdocs](https://www.mkdocs.org/), with versioning handled by +[mkdocs](https://www.mkdocs.org/), using a number of plugins, including +[mkdoctrings](https://mkdocstrings.github.io/), with versioning handled by [mike](https://github.com/jimporter/mike). Notebooks are an integral part of the documentation as well, please read [the section on notebooks](#notebooks) above. +If you want to build the documentation locally, please make sure you followed +the instructions in the section +[Setting up your environment](#setting-up-your-environment). 
+ Use the following command to build the documentation the same way it is done in CI: @@ -262,6 +277,18 @@ mkdocs serve This will rebuild the documentation on changes to `.md` files inside `docs`, notebooks and python files. +On OSX, it is possible that the cairo lib file is not properly linked when installed +via homebrew. In this case you might encounter an error like this +```shell +OSError: no library called "cairo-2" was found +no library called "cairo" was found +no library called "libcairo-2" was found +``` +when calling `mkdocs build` or `mkdocs serve`. This can be resolved via setting +the environment variable `DYLD_FALLBACK_LIBRARY_PATH`: +```shell +export DYLD_FALLBACK_LIBRARY_PATH=$DYLD_FALLBACK_LIBRARY_PATH:/opt/homebrew/lib +``` ### Adding new pages @@ -272,19 +299,57 @@ pages are explicitly listed and manually arranged in the `nav` section of the configuration. +### Creating stable references for autorefs + +mkdocstrings includes the plugin +[autorefs](https://github.com/mkdocstrings/autorefs) to enable automatic linking +across pages with e.g. `[a link][to-something]`. Anchors are autogenerated +from section titles, and are not guaranteed to be unique. In order to ensure +that a link will remain valid, add a custom anchor to the section title: + +```markdown +## Some section { #permanent-anchor-to-some-section } +``` + +(note the space after the opening brace). You can then refer to it within +another markdown file with `[Some section][permanent-anchor-to-some-section]`. + + ### Using bibliography -Bibliographic citations are managed with the plugins -[mkdocs-bibtex]() and [...][]. -To enter a citation first add the entry to `docs/pydvl.bib`. For team -contributor this should be an export of the Zotero folder `software/pydvl` in -the [TransferLab Zotero library](https://www.zotero.org/groups/2703043/transferlab/library). -All other contributors just add the bibtex data, and a maintainer will add it to -the group library upon merging. 
+Bibliographic citations are managed with the plugin +[mkdocs-bibtex](https://github.com/shyamd/mkdocs-bibtex/). To enter a citation +first add the entry to `docs/pydvl.bib`. For team contributor this should be an +export of the Zotero folder `software/pydvl` in the [TransferLab Zotero +library](https://www.zotero.org/groups/2703043/transferlab/library). All other +contributors just add the bibtex data, and a maintainer will add it to the group +library upon merging. + +To add a citation inside a markdown file, use the notation `[@citekey]`. Alas, +because of when mkdocs-bibtex enters the pipeline, it won't process docstrings. +For module documentation, we manually inject html into the markdown files. For +example, in `pydvl.value.shapley.montecarlo` we have: + +```markdown +""" +Module docstring... + +## References + +[^1]: Ghorbani, A., Zou, J., 2019. + [Data Shapley: Equitable Valuation of Data for Machine + Learning](https://proceedings.mlr.press/v97/ghorbani19c.html). + In: Proceedings of the 36th International Conference on Machine Learning, + PMLR, pp. 2242–2251. +""" +``` -To add a citation inside a module or function's docstring, use the notation -`[@citekey]`. A references section is automatically added at the bottom of each -module's auto-generated documentation. +and then later in the file, inside a function's docstring: + +```markdown + This function implements (Ghorbani and Zou, 2019)1 +``` ### Writing mathematics @@ -298,18 +363,21 @@ exceptions). For simplicity, declare the string as "raw" with the prefix `r`: # This will work def f(x: float) -> float: r""" Computes - $$ f(x) = \frac{1}{x^2} $ + $${ f(x) = \frac{1}{x^2} }$$ """ return 1/(x*x) -# This throws an obscure sphinx error +# This throws an obscure error def f(x: float) -> float: """ Computes - $$ \frac{1}{x^2} $$ + $$\frac{1}{x^2}$$ """ return 1/(x*x) ``` +Note how there is no space after the dollar signs. This is important! You can +use braces for legibility like in the first example. 
+

### Abbreviations

We keep the abbreviations used in the documentation inside the
@@ -343,8 +411,12 @@ runs](#skipping-ci-runs)).

 3. We split the tests based on their duration into groups and run them in
    parallel. For that we use
    [pytest-split](https://jerry-git.github.io/pytest-split)
-   to first store the duration of all tests with `pytest --store-durations pytest --slow-tests`
+   to first store the duration of all tests with
+   `tox -e tests -- --store-durations --slow-tests`
    in a `.test_durations` file.
+
+   Alternatively, we can use pytest directly
+   `pytest --store-durations --slow-tests`.

   > **Note** This does not have to be done each time a new test or test case
   > is added. For new tests and test cases pytes-split assumes
@@ -359,11 +431,14 @@ runs](#skipping-ci-runs)).
   Then we can have as many splits as we want:

   ```shell
-  pytest --splits 3 --group 1
-  pytest --splits 3 --group 2
-  pytest --splits 3 --group 3
+  tox -e tests -- --splits 3 --group 1
+  tox -e tests -- --splits 3 --group 2
+  tox -e tests -- --splits 3 --group 3
   ```

+  Alternatively, we can use pytest directly
+  `pytest --splits 3 --group 1`.
+
   Each one of these commands should be run in a separate shell/job to run
   the test groups in parallel and decrease the total runtime.

@@ -491,7 +566,8 @@ readme).
In order to do this, simply prefix the commit message with `[skip ci]`. The
string can be anywhere, but adding it to the beginning of the commit message
makes it more evident when looking at commits in a PR.

-Refer to the official [GitHub documentation](https://docs.github.com/en/actions/managing-workflow-runs/skipping-workflow-runs)
+Refer to the official [GitHub
+documentation](https://docs.github.com/en/actions/managing-workflow-runs/skipping-workflow-runs)
for more information.
## Release processes @@ -510,13 +586,13 @@ Then, a new release can be created using the script `bumpversion` automatically derive the next release version by bumping the patch part): -```shell script +```shell build_scripts/release-version.sh 0.1.6 ``` To find out how to use the script, pass the `-h` or `--help` flags: -```shell script +```shell build_scripts/release-version.sh --help ``` @@ -542,7 +618,7 @@ create a new release manually by following these steps: 2. When ready to release: From the develop branch create the release branch and perform release activities (update changelog, news, ...). For your own convenience, define an env variable for the release version - ```shell script + ```shell export RELEASE_VERSION="vX.Y.Z" git checkout develop git branch release/${RELEASE_VERSION} && git checkout release/${RELEASE_VERSION} @@ -553,7 +629,7 @@ create a new release manually by following these steps: (the `release` part is ignored but required by bumpversion :rolling_eyes:). 4. Merge the release branch into `master`, tag the merge commit, and push back to the repo. The CI pipeline publishes the package based on the tagged commit. - ```shell script + ```shell git checkout master git merge --no-ff release/${RELEASE_VERSION} git tag -a ${RELEASE_VERSION} -m"Release ${RELEASE_VERSION}" @@ -564,14 +640,14 @@ create a new release manually by following these steps: always strictly more recent than the last published release version from `master`. 6. Merge the release branch into `develop`: - ```shell script + ```shell git checkout develop git merge --no-ff release/${RELEASE_VERSION} git push origin develop ``` 7. Delete the release branch if necessary: `git branch -d release/${RELEASE_VERSION}` -8. Create a Github +8. Create a GitHub [release](https://docs.github.com/en/repositories/releasing-projects-on-github/managing-releases-in-a-repository#creating-a-release) for the created tag. 9. Pour yourself a cup of coffee, you earned it! 
:coffee: :sparkles: @@ -601,8 +677,8 @@ a GitHub release. We use [bump2version](https://pypi.org/project/bump2version/) to bump the build part of the version number without commiting or tagging the change and then publish a package to TestPyPI from CI using Twine. The version -has the github run number appended. +has the GitHub run number appended. -For more details refer to the +For more details refer to the files [.github/workflows/publish.yaml](.github/workflows/publish.yaml) and -[.github/workflows/tox.yaml](.github/workflows/tox.yaml) files. +[.github/workflows/tox.yaml](.github/workflows/tox.yaml). diff --git a/README.md b/README.md index 948ffc842..2f7296018 100644 --- a/README.md +++ b/README.md @@ -18,6 +18,9 @@ **pyDVL** collects algorithms for **Data Valuation** and **Influence Function** computation. +Refer to the [Methods](https://pydvl.org/devel/getting-started/methods/) +page of our documentation for a list of all implemented methods. + **Data Valuation** for machine learning is the task of assigning a scalar to each element of a training set which reflects its contribution to the final performance or outcome of some model trained on it. Some concepts of @@ -29,7 +32,7 @@ pyDVL focuses on model-dependent methods. width="70%" align="center" style="display: block; margin-left: auto; margin-right: auto;" - src="docs/value/img/mclc-best-removal-10k-natural.svg" + src="https://pydvl.org/devel/value/img/mclc-best-removal-10k-natural.svg" alt="best sample removal" />

@@ -48,7 +51,7 @@ of training samples over individual test points. width="70%" align="center" style="display: block; margin-left: auto; margin-right: auto;" - src="docs/assets/influence_functions_example.png" + src="https://pydvl.org/devel/examples/img/influence_functions_example.png" alt="best sample removal" />

@@ -72,10 +75,14 @@ You can also install the latest development version from pip install pyDVL --index-url https://test.pypi.org/simple/ ``` -pyDVL has also extra dependencies for certain functionalities (e.g. influence functions). +pyDVL has also extra dependencies for certain functionalities, +e.g. for using influence functions run +```shell +$ pip install pyDVL[influence] +``` For more instructions and information refer to [Installing pyDVL -](https://pydvl.org/stable/getting-started/installation/) in the +](https://pydvl.org/stable/getting-started/#installation) in the documentation. # Usage @@ -256,71 +263,6 @@ Please open new issues for bugs, feature requests and extensions. You can read about the structure of the project, the toolchain and workflow in the [guide for contributions](CONTRIBUTING.md). -# Papers - -We currently implement the following papers: - -## Data Valuation - -- Castro, Javier, Daniel Gómez, and Juan Tejada. [Polynomial Calculation of the - Shapley Value Based on Sampling](https://doi.org/10.1016/j.cor.2008.04.004). - Computers & Operations Research, Selected papers presented at the Tenth - International Symposium on Locational Decisions (ISOLDE X), 36, no. 5 (May 1, - 2009): 1726–30. -- Ghorbani, Amirata, and James Zou. [Data Shapley: Equitable Valuation of Data - for Machine Learning](http://proceedings.mlr.press/v97/ghorbani19c.html). In - International Conference on Machine Learning, 2242–51. PMLR, 2019. -- Wang, Tianhao, Yu Yang, and Ruoxi Jia. - [Improving Cooperative Game Theory-Based Data Valuation via Data Utility - Learning](https://doi.org/10.48550/arXiv.2107.06336). arXiv, 2022. -- Jia, Ruoxi, David Dao, Boxin Wang, Frances Ann Hubis, Nezihe Merve Gurel, Bo - Li, Ce Zhang, Costas Spanos, and Dawn Song. [Efficient Task-Specific Data - Valuation for Nearest Neighbor Algorithms](https://doi.org/10.14778/3342263.3342637). - Proceedings of the VLDB Endowment 12, no. 11 (1 July 2019): 1610–23. -- Okhrati, Ramin, and Aldo Lipani. 
[A Multilinear Sampling Algorithm to Estimate - Shapley Values](https://doi.org/10.1109/ICPR48806.2021.9412511). In 25th - International Conference on Pattern Recognition (ICPR 2020), 7992–99. IEEE, - 2021. -- Yan, T., and Procaccia, A. D. [If You Like Shapley Then You’ll Love the - Core](https://ojs.aaai.org/index.php/AAAI/article/view/16721). Proceedings of - the AAAI Conference on Artificial Intelligence, 35(6) (2021): 5751-5759. -- Jia, Ruoxi, David Dao, Boxin Wang, Frances Ann Hubis, Nick Hynes, Nezihe Merve - Gürel, Bo Li, Ce Zhang, Dawn Song, and Costas J. Spanos. [Towards Efficient - Data Valuation Based on the Shapley Value](http://proceedings.mlr.press/v89/jia19a.html). - In 22nd International Conference on Artificial Intelligence and Statistics, - 1167–76. PMLR, 2019. -- Wang, Jiachen T., and Ruoxi Jia. [Data Banzhaf: A Robust Data Valuation - Framework for Machine Learning](https://doi.org/10.48550/arXiv.2205.15466). - arXiv, October 22, 2022. -- Kwon, Yongchan, and James Zou. [Beta Shapley: A Unified and Noise-Reduced Data - Valuation Framework for Machine Learning](http://arxiv.org/abs/2110.14049). - In Proceedings of the 25th International Conference on Artificial Intelligence - and Statistics (AISTATS) 2022, Vol. 151. Valencia, Spain: PMLR, 2022. -- Kwon, Yongchan, and James Zou. [Data-OOB: Out-of-Bag Estimate as a Simple and - Efficient Data Value](https://proceedings.mlr.press/v202/kwon23e.html). In - Proceedings of the 40th International Conference on Machine Learning, 18135–52. - PMLR, 2023. -- Schoch, Stephanie, Haifeng Xu, and Yangfeng Ji. [CS-Shapley: Class-Wise - Shapley Values for Data Valuation in - Classification](https://openreview.net/forum?id=KTOcrOR5mQ9). In Proc. of the - Thirty-Sixth Conference on Neural Information Processing Systems (NeurIPS). - New Orleans, Louisiana, USA, 2022. - -## Influence Functions - -- Koh, Pang Wei, and Percy Liang. 
[Understanding Black-Box Predictions via - Influence Functions](http://proceedings.mlr.press/v70/koh17a.html). In - Proceedings of the 34th International Conference on Machine Learning, - 70:1885–94. Sydney, Australia: PMLR, 2017. -- Naman Agarwal, Brian Bullins, and Elad Hazan, [Second-Order Stochastic Optimization - for Machine Learning in Linear Time](https://www.jmlr.org/papers/v18/16-491.html), - Journal of Machine Learning Research 18 (2017): 1-40. -- Schioppa, Andrea, Polina Zablotskaia, David Vilar, and Artem Sokolov. - [Scaling Up Influence Functions](http://arxiv.org/abs/2112.03052). - In Proceedings of the AAAI-22. arXiv, 2021. -- James Martens, Roger Grosse, [Optimizing Neural Networks with Kronecker-factored Approximate Curvature](https://arxiv.org/abs/1503.05671), International Conference on Machine Learning (ICML), 2015. -- George, Thomas, César Laurent, Xavier Bouthillier, Nicolas Ballas, Pascal Vincent, [Fast Approximate Natural Gradient Descent in a Kronecker-factored Eigenbasis](https://arxiv.org/abs/1806.03884), Advances in Neural Information Processing Systems 31,2018. - # License pyDVL is distributed under diff --git a/build_scripts/copy_contributing_guide.py b/build_scripts/copy_contributing_guide.py new file mode 100644 index 000000000..0c4b2fbf2 --- /dev/null +++ b/build_scripts/copy_contributing_guide.py @@ -0,0 +1,38 @@ +import logging +import os +from pathlib import Path + +import mkdocs.plugins + +logger = logging.getLogger(__name__) + +root_dir = Path(__file__).parent.parent +docs_dir = root_dir / "docs" +contributing_file = root_dir / "CONTRIBUTING.md" +target_filepath = docs_dir / contributing_file.name + + +@mkdocs.plugins.event_priority(100) +def on_pre_build(config): + logger.info("Temporarily copying contributing guide to docs directory") + try: + if os.path.getmtime(contributing_file) <= os.path.getmtime(target_filepath): + logger.info( + f"Contributing guide '{os.fspath(contributing_file)}' hasn't been updated, skipping." 
+ ) + return + except FileNotFoundError: + pass + logger.info( + f"Creating symbolic link for '{os.fspath(contributing_file)}' " + f"at '{os.fspath(target_filepath)}'" + ) + target_filepath.symlink_to(contributing_file) + + logger.info("Finished copying contributing guide to docs directory") + + +@mkdocs.plugins.event_priority(-100) +def on_shutdown(): + logger.info("Removing temporary contributing guide in docs directory") + target_filepath.unlink() diff --git a/build_scripts/modify_binder_link.py b/build_scripts/modify_binder_link.py index a01da10b5..eb09ea02b 100644 --- a/build_scripts/modify_binder_link.py +++ b/build_scripts/modify_binder_link.py @@ -13,6 +13,7 @@ from pathlib import Path from typing import TYPE_CHECKING, Literal, Optional +from bs4 import BeautifulSoup from git import Repo from mkdocs.plugins import Config, event_priority @@ -43,23 +44,41 @@ def on_startup(command: Literal["build", "gh-deploy", "serve"], dirty: bool) -> @event_priority(-50) -def on_page_markdown( - markdown: str, page: "Page", config: Config, files: "Files" +def on_page_content( + html: str, page: "Page", config: Config, files: "Files" ) -> Optional[str]: if "examples" not in page.url: return logger.info( f"Replacing binder link with link to notebook in repository for notebooks in {page.url}" ) + repo_name = config["repo_name"] root_dir = Path(config["docs_dir"]).parent notebooks_dir = root_dir / "notebooks" notebook_filename = Path(page.file.src_path).name file_path = (notebooks_dir / notebook_filename).relative_to(root_dir) + + soup = BeautifulSoup(html, features="html.parser") + binder_anchor = None + for a in soup.find_all("a", href=True, limit=5): + if BINDER_BASE_URL in a["href"]: + binder_anchor = a + break + if binder_anchor is None: + logger.warning(f"Binder link was not found in notebook {file_path}") + return + url_path = f"%2Ftree%2F{file_path}" binder_url = f"{BINDER_BASE_URL}/gh/{repo_name}/{branch_name}?urlpath={url_path}" - binder_link = 
f"{BINDER_LOGO_WITHOUT_CAPTION}({binder_url})" logger.info(f"New binder url: {binder_url}") - logger.info(f"Using regex: {BINDER_LINK_PATTERN}") - markdown = re.sub(BINDER_LINK_PATTERN, binder_link, markdown) - return markdown + + binder_anchor["href"] = binder_url + binder_img = binder_anchor.find("img") + binder_img["style"] = "margin: auto; display: block; width: 7rem" + binder_img_caption = binder_anchor.find("figcaption") + binder_img_caption.decompose() + + html = soup.prettify() + + return html diff --git a/data/top_hits_spotify_dataset.csv b/data/top_hits_spotify_dataset.csv index 8639f69ab..3ae2e1b2b 100644 --- a/data/top_hits_spotify_dataset.csv +++ b/data/top_hits_spotify_dataset.csv @@ -1,2001 +1,2001 @@ -artist,song,duration_ms,explicit,year,popularity,danceability,energy,key,loudness,mode,speechiness,acousticness,instrumentalness,liveness,valence,tempo,genre -Britney Spears,Oops!...I Did It Again,211160,False,2000,77,0.751,0.834,1,-5.444,0,0.0437,0.3,1.77e-05,0.355,0.894,95.053,pop -blink-182,All The Small Things,167066,False,1999,79,0.434,0.897,0,-4.918,1,0.0488,0.0103,0.0,0.612,0.684,148.726,"rock, pop" -Faith Hill,Breathe,250546,False,1999,66,0.529,0.496,7,-9.007,1,0.029,0.173,0.0,0.251,0.278,136.859,"pop, country" -Bon Jovi,It's My Life,224493,False,2000,78,0.551,0.913,0,-4.063,0,0.0466,0.0263,1.35e-05,0.347,0.544,119.992,"rock, metal" -*NSYNC,Bye Bye Bye,200560,False,2000,65,0.614,0.928,8,-4.806,0,0.0516,0.0408,0.00104,0.0845,0.879,172.656,pop -Sisqo,Thong Song,253733,True,1999,69,0.706,0.888,2,-6.959,1,0.0654,0.119,9.64e-05,0.07,0.714,121.549,"hip hop, pop, R&B" -Eminem,The Real Slim Shady,284200,True,2000,86,0.949,0.661,5,-4.244,0,0.0572,0.0302,0.0,0.0454,0.76,104.504,hip hop -Robbie Williams,Rock DJ,258560,False,2000,68,0.708,0.772,7,-4.264,1,0.0322,0.0267,0.0,0.467,0.861,103.035,"pop, rock" -Destiny's Child,Say My Name,271333,False,1999,75,0.713,0.678,5,-3.525,0,0.102,0.273,0.0,0.149,0.734,138.009,"pop, R&B" -Modjo,Lady - Hear Me 
Tonight,307153,False,2001,77,0.72,0.808,6,-5.627,1,0.0379,0.00793,0.0293,0.0634,0.869,126.041,Dance/Electronic -Gigi D'Agostino,L'Amour Toujours,238759,False,2011,1,0.617,0.728,7,-7.932,1,0.0292,0.0328,0.0482,0.36,0.808,139.066,pop -Eiffel 65,Move Your Body - Gabry Ponte Original Radio Edit,268863,False,1999,56,0.745,0.958,7,-9.664,1,0.0287,0.0813,0.324,0.533,0.96,129.962,pop -Bomfunk MC's,Freestyler,306333,False,2000,55,0.822,0.922,11,-5.798,0,0.0989,0.0291,0.325,0.252,0.568,163.826,pop -Sting,Desert Rose,285960,False,1999,62,0.586,0.659,0,-7.92,0,0.0304,0.011,0.0,0.106,0.147,111.989,"rock, pop" -Melanie C,Never Be The Same Again,294200,False,1999,61,0.689,0.685,3,-5.153,1,0.0478,0.0921,0.0,0.119,0.398,160.067,"pop, Dance/Electronic" -Aaliyah,Try Again,284000,False,2002,53,0.797,0.622,6,-5.642,0,0.29,0.0807,0.0,0.0841,0.731,93.02,"hip hop, pop, R&B" -Anastacia,I'm Outta Love - Radio Edit,245400,False,1999,64,0.761,0.716,10,-5.8,0,0.056,0.396,0.0,0.0771,0.649,119.41,pop -Alice Deejay,Better Off Alone,214883,False,2000,73,0.671,0.88,8,-6.149,0,0.0552,0.00181,0.691,0.285,0.782,136.953,pop -Gigi D'Agostino,The Riddle,285426,False,1999,64,0.74,0.876,6,-6.87,0,0.0369,0.0173,0.00152,0.0785,0.825,127.002,pop -Dr. 
Dre,The Next Episode,161506,True,1999,82,0.922,0.909,10,-2.429,0,0.27,0.0281,0.0,0.0856,0.309,95.295,hip hop -Linkin Park,In the End,216880,False,2000,83,0.556,0.864,3,-5.87,0,0.0584,0.00958,0.0,0.209,0.4,105.143,"rock, metal" -Tom Jones,Sexbomb,211893,False,1999,65,0.801,0.876,8,-3.94,0,0.0446,0.144,1.38e-05,0.104,0.932,122.979,"rock, Folk/Acoustic, easy listening" -Sonique,It Feels So Good,240866,False,2000,62,0.634,0.677,5,-7.278,0,0.0304,0.0117,0.00103,0.126,0.558,135.012,pop -M.O.P.,Cold as Ice,244466,True,2000,54,0.656,0.88,11,-5.425,0,0.143,0.0421,0.0,0.294,0.758,85.565,hip hop -Melanie C,I Turn To You,352173,False,1999,54,0.522,0.803,1,-5.825,1,0.0327,0.00117,0.00167,0.31,0.0783,135.205,"pop, Dance/Electronic" -Limp Bizkit,Take A Look Around,321040,False,2000,72,0.425,0.852,11,-5.607,1,0.046,0.0175,0.306,0.0935,0.512,101.968,metal -Darude,Sandstorm,225493,False,2001,69,0.528,0.965,11,-7.984,0,0.0465,0.141,0.985,0.0797,0.587,136.065,"pop, Dance/Electronic" -Da Brat,What'chu Like (feat. 
Tyrese),221160,True,2000,53,0.879,0.681,10,-8.951,0,0.24,0.017,0.0,0.0669,0.817,99.974,"hip hop, pop, R&B" -Moloko,The Time Is Now,318280,False,2000,54,0.682,0.743,9,-10.644,0,0.165,0.35,0.000129,0.277,0.546,127.962,"pop, Dance/Electronic" -Chicane,Don't Give Up,210786,False,2016,47,0.644,0.72,10,-9.635,0,0.0419,0.00145,0.504,0.0839,0.53,132.017,Dance/Electronic -DMX,Party Up,268866,True,1999,71,0.51,0.931,11,-3.302,1,0.347,0.0738,0.0,0.5,0.53,201.936,"hip hop, pop" -Debelah Morgan,Dance with Me,220106,False,2000,49,0.85,0.674,7,-7.981,0,0.0373,0.309,0.000645,0.0356,0.74,115.005,"pop, R&B" -Madonna,Music,225973,False,2000,58,0.736,0.802,7,-8.527,1,0.0663,0.00149,0.0876,0.14,0.871,119.854,pop -Ruff Endz,No More,242560,False,2000,52,0.839,0.641,10,-5.669,0,0.0858,0.0324,4.56e-06,0.0602,0.927,97.004,R&B -Britney Spears,Born to Make You Happy,243533,False,1999,58,0.633,0.922,11,-4.842,0,0.0454,0.116,0.000465,0.071,0.686,84.11,pop -Montell Jordan,Get It On Tonite,276266,False,1999,59,0.813,0.491,10,-9.923,0,0.077,0.241,4.61e-05,0.0817,0.868,99.008,"hip hop, pop, R&B" -Kylie Minogue,Spinning Around,207866,False,2000,55,0.761,0.662,6,-7.645,0,0.0548,0.292,6.19e-05,0.0956,0.631,120.043,"pop, Dance/Electronic" -JAY-Z,Big Pimpin',283066,True,1999,69,0.88,0.814,11,-6.307,0,0.14,0.168,0.00672,0.0584,0.942,138.083,hip hop -LeAnn Rimes,I Need You,229826,False,2001,61,0.478,0.736,7,-7.124,1,0.0367,0.02,9.58e-05,0.118,0.564,144.705,"pop, country" -Avant,Separated,255600,False,2000,55,0.798,0.48,0,-5.564,1,0.0276,0.247,0.0,0.237,0.643,108.241,"pop, R&B" -Enrique Iglesias,Be With You,219360,False,1999,54,0.683,0.866,1,-5.436,0,0.0329,0.0395,0.00161,0.0483,0.542,121.996,"pop, latin" -Toni Braxton,He Wasn't Man Enough,261933,False,2000,66,0.739,0.947,11,-1.916,0,0.0411,0.00916,3.14e-05,0.326,0.766,88.009,"pop, R&B" -Bow Wow,Bounce With Me (feat. Xscape) - Edited Album Version,175893,False,2000,36,0.852,0.75,8,-5.153,1,0.168,0.434,0.0,0.265,0.934,72.016,"hip hop, pop, R&B" -Dr. 
Dre,Forgot About Dre,222293,True,1999,79,0.924,0.74,8,-1.299,1,0.0774,0.0827,0.0,0.163,0.621,133.974,hip hop -Missy Elliott,Hot Boyz,215466,True,1998,49,0.727,0.445,1,-11.241,1,0.291,0.339,0.0,0.18,0.527,81.125,"hip hop, pop, R&B" -Backstreet Boys,Show Me the Meaning of Being Lonely,234960,False,1999,68,0.63,0.625,6,-5.088,0,0.0252,0.231,0.0,0.0765,0.683,167.998,pop -Samantha Mumba,Gotta Tell You,201946,False,2018,43,0.729,0.632,0,-8.75,0,0.0279,0.191,0.0,0.166,0.774,109.981,pop -Mýa,Case Of The Ex (Whatcha Gonna Do),236906,False,2000,59,0.772,0.688,1,-4.715,0,0.0405,0.0548,9.79e-05,0.0725,0.348,98.0,"pop, R&B" -Mary Mary,Shackles (Praise You),198346,False,2000,64,0.779,0.834,7,-2.773,1,0.162,0.0343,0.0,0.0886,0.8,100.46,R&B -Next,Wifey,243666,False,2004,52,0.829,0.652,7,-8.693,0,0.108,0.067,0.0,0.0812,0.726,99.581,"hip hop, pop, R&B" -Janet Jackson,Doesn't Really Matter,265026,False,2001,47,0.771,0.796,5,-3.081,0,0.076,0.0993,0.00278,0.0981,0.801,99.316,"pop, R&B" -Ricky Martin,She Bangs - English Version,280626,False,2000,60,0.63,0.95,1,-4.012,1,0.0806,0.000915,6.51e-06,0.373,0.858,143.866,"pop, latin" -Jagged Edge,He Can't Love U,244053,False,2000,55,0.721,0.836,8,-3.972,0,0.206,0.112,0.0,0.235,0.508,126.279,"hip hop, pop, R&B" -Sisqo,Incomplete,274226,True,1999,60,0.746,0.443,1,-7.693,0,0.0771,0.282,0.0,0.14,0.272,119.311,"hip hop, pop, R&B" -JAY-Z,I Just Wanna Love U (Give It 2 Me),227866,True,2000,59,0.8,0.922,4,-5.125,0,0.24,0.301,9.31e-06,0.0352,0.801,98.631,hip hop -Mariah Carey,Thank God I Found You (feat. Joe & 98°),257360,False,1999,59,0.348,0.532,10,-5.882,1,0.0331,0.592,0.0,0.106,0.148,129.297,"pop, R&B" -Baha Men,Who Let The Dogs Out,198400,False,2000,65,0.869,0.887,0,-4.505,1,0.0993,0.0605,0.0,0.148,0.784,129.221,R&B -Donell Jones,"U Know What's Up (feat. 
Lisa ""Left Eye"" Lopes)",243733,True,1999,63,0.854,0.543,8,-6.166,0,0.0844,0.0402,5.73e-05,0.0419,0.868,103.032,"pop, R&B" -LeAnn Rimes,Can't Fight The Moonlight,215506,False,2001,65,0.628,0.834,6,-6.341,0,0.0497,0.403,0.0,0.051,0.626,97.865,"pop, country" -Oasis,Go Let It Out,278666,False,2000,0,0.408,0.849,2,-5.631,1,0.0333,0.0136,2.51e-05,0.56,0.628,84.192,"Folk/Acoustic, rock" -DJ Ötzi,Hey Baby (Radio Mix),219240,False,2010,58,0.666,0.968,10,-3.196,1,0.046,0.123,0.0,0.347,0.834,135.099,"pop, easy listening, Dance/Electronic" -P!nk,Most Girls,298960,False,2000,52,0.742,0.732,2,-6.046,0,0.0311,0.0424,0.00446,0.101,0.694,97.922,pop -Mariah Carey,Against All Odds (Take A Look at Me Now) (feat. Westlife),199480,False,2011,0,0.471,0.514,1,-5.599,1,0.0315,0.584,0.0,0.103,0.373,117.338,"pop, R&B" -Craig David,Fill Me In,257200,False,2000,60,0.682,0.744,8,-6.981,1,0.0365,0.376,0.00951,0.06,0.827,132.493,"hip hop, pop, R&B" -Christina Aguilera,I Turn to You,273706,False,1999,61,0.599,0.47,1,-8.356,1,0.0376,0.38,0.0,0.111,0.298,127.177,pop -Madonna,American Pie,273533,False,2000,58,0.631,0.734,5,-7.48,0,0.036,0.348,0.0,0.135,0.591,124.036,pop -Red Hot Chili Peppers,Otherside,255373,False,1999,78,0.458,0.795,0,-3.265,1,0.0574,0.00316,0.000202,0.0756,0.513,123.229,rock -Sammie,I Like It,251040,False,2000,55,0.826,0.656,9,-8.529,1,0.0617,0.0101,0.000113,0.0272,0.852,129.963,"hip hop, pop, R&B" -Craig David,7 Days,235133,False,2000,70,0.659,0.812,4,-7.499,0,0.0487,0.23,0.0,0.0951,0.888,83.014,"hip hop, pop, R&B" -Santana,Maria Maria (feat. 
The Product G&B),261973,False,1999,66,0.777,0.601,2,-5.931,1,0.126,0.0406,0.00201,0.0348,0.68,97.911,"rock, blues, latin" -Kandi,Don't Think I'm Not,243533,False,2000,55,0.859,0.622,11,-8.196,1,0.0445,0.0661,0.0,0.0394,0.433,134.007,"pop, R&B" -P!nk,There You Go,202800,False,2000,55,0.822,0.847,10,-6.729,0,0.0917,0.0854,0.0,0.0452,0.668,107.908,pop -Vengaboys,Shalala Lala,214819,False,2000,58,0.751,0.901,2,-5.802,1,0.0328,0.0504,0.00308,0.0395,0.973,124.017,pop -Ronan Keating,Life Is A Rollercoaster,234826,False,2000,59,0.655,0.791,0,-8.923,1,0.0302,0.1,0.000124,0.334,0.862,118.981,"pop, rock" -Madison Avenue,Don't Call Me Baby,228140,False,1999,56,0.808,0.982,3,-6.588,0,0.0311,0.0585,0.00689,0.35,0.961,124.999,Dance/Electronic -Destiny's Child,"Jumpin', Jumpin'",230200,False,1999,70,0.771,0.685,1,-4.639,1,0.0567,0.00543,0.00157,0.0537,0.683,88.997,"pop, R&B" -Céline Dion,That's the Way It Is,241373,False,1999,64,0.634,0.886,9,-5.424,1,0.0434,0.154,0.0,0.118,0.577,93.04,pop -3 Doors Down,Kryptonite,233933,False,2000,78,0.545,0.865,11,-5.708,0,0.0286,0.00664,1.1e-05,0.168,0.543,99.009,"pop, rock, metal" -Carl Thomas,I Wish,226760,False,2000,52,0.736,0.666,1,-4.929,1,0.0337,0.0593,3.82e-05,0.107,0.224,89.824,"pop, R&B" -Mystikal,Shake Ya Ass,256973,True,2000,57,0.914,0.607,7,-5.658,1,0.32,0.0626,0.0,0.0515,0.666,98.054,"hip hop, pop" -Fuel,Hemorrhage (In My Hands),236866,False,2000,49,0.313,0.831,1,-3.894,1,0.0404,0.000127,0.000341,0.24,0.332,152.034,"rock, pop, metal" -Donell Jones,Where I Wanna Be,253626,False,1999,57,0.664,0.396,5,-9.131,0,0.0298,0.52,0.0,0.268,0.453,102.053,"pop, R&B" -Savage Garden,Crash and Burn,281466,False,1999,54,0.581,0.607,4,-8.458,1,0.028,0.189,1.6e-06,0.0882,0.213,102.03,pop -Westlife,My Love,231760,False,2000,68,0.491,0.593,0,-5.975,1,0.0255,0.098,0.0,0.257,0.328,144.142,pop -All Saints,Pure Shores,268746,False,2000,62,0.631,0.664,6,-9.197,1,0.0242,0.0498,0.00042,0.0696,0.407,100.618,pop -Destiny's Child,"Independent Women, Pt. 
1",221133,False,2001,65,0.73,0.602,6,-3.782,0,0.206,0.362,3.69e-06,0.169,0.927,97.954,"pop, R&B" -*NSYNC,It's Gonna Be Me,191040,False,2000,60,0.644,0.874,0,-4.666,0,0.0801,0.0459,2.24e-06,0.0584,0.882,165.09,pop -Erykah Badu,Bag Lady,348893,False,2000,54,0.724,0.416,5,-8.964,0,0.0841,0.365,0.0,0.0969,0.578,151.181,"hip hop, R&B" -Marc Anthony,You Sang To Me,347106,False,1999,56,0.578,0.894,10,-5.42,1,0.0296,0.0103,2.66e-06,0.216,0.741,165.98,"pop, latin" -Matchbox Twenty,Bent,256133,False,2000,54,0.518,0.83,6,-6.814,0,0.0386,0.0404,3.85e-06,0.375,0.527,95.468,"pop, rock" -Gabrielle,Rise,219093,False,2001,60,0.558,0.481,8,-9.487,1,0.026,0.315,8.83e-06,0.09,0.631,144.673,"pop, R&B" -Backstreet Boys,Shape of My Heart,230093,False,2000,70,0.575,0.786,9,-4.353,1,0.0296,0.252,0.0,0.159,0.518,96.102,pop -Creed,With Arms Wide Open,274800,False,1999,64,0.41,0.539,0,-8.412,1,0.0302,0.00425,0.00089,0.117,0.141,138.852,"pop, rock, metal" -The Corrs,Breathless,207506,False,2000,68,0.607,0.82,11,-7.754,1,0.0597,0.0541,6.8e-05,0.269,0.768,126.988,"pop, Folk/Acoustic" -Joe,I Wanna Know,296693,False,2000,65,0.725,0.487,8,-5.959,0,0.0368,0.26,1.09e-05,0.431,0.599,136.086,"pop, R&B" -Wheatus,Teenage Dirtbag,241666,True,1999,71,0.625,0.85,4,-3.904,1,0.0495,0.346,0.000233,0.174,0.633,94.661,set() -Christina Aguilera,Come on over Baby (All I Want Is You) - Radio Version,203333,False,1999,64,0.829,0.915,8,-3.205,1,0.106,0.226,1.25e-05,0.246,0.779,118.903,pop -Creed,Higher,316733,False,1999,69,0.459,0.83,2,-6.254,1,0.0364,5.15e-05,0.00014,0.206,0.431,155.827,"pop, rock, metal" -Britney Spears,Lucky,206226,False,2000,65,0.765,0.791,8,-5.707,1,0.0317,0.262,0.000154,0.0669,0.966,95.026,pop -Nelly,Country Grammar (Hot Shit),287000,True,2000,68,0.865,0.664,2,-6.822,1,0.108,0.00689,0.0,0.142,0.565,162.831,"hip hop, pop, R&B" -Shaggy,It Wasn't Me,227600,False,2000,76,0.853,0.606,0,-4.596,1,0.0713,0.0561,0.0,0.313,0.654,94.759,"hip hop, pop" -Destiny's 
Child,Survivor,254026,False,2001,70,0.514,0.911,1,-2.027,0,0.41,0.0559,0.0,0.775,0.619,161.109,"pop, R&B" -Eminem,Stan,404106,True,2000,83,0.78,0.768,6,-4.325,0,0.238,0.0371,2.34e-06,0.518,0.507,80.063,hip hop -Kylie Minogue,Can't Get You out of My Head,230640,False,2001,73,0.766,0.563,9,-7.516,0,0.0339,0.0263,0.683,0.115,0.964,126.007,"pop, Dance/Electronic" -Christina Aguilera,"Lady Marmalade - From ""Moulin Rouge"" Soundtrack",264893,False,2001,68,0.76,0.801,5,-3.769,1,0.0534,0.0144,1.49e-05,0.665,0.653,109.919,pop -Nelly Furtado,I'm Like A Bird,243160,False,2000,1,0.622,0.608,10,-5.085,1,0.036,0.138,5.38e-06,0.273,0.607,89.661,"hip hop, pop, latin" -Shakira,"Whenever, Wherever",196160,False,2001,74,0.794,0.832,1,-4.862,0,0.0407,0.237,1.14e-05,0.203,0.871,107.657,"pop, latin" -Jimmy Eat World,The Middle,165853,False,2001,78,0.643,0.849,2,-5.428,1,0.0526,0.0371,0.0,0.058,0.903,162.152,"rock, pop" -Train,Drops of Jupiter (Tell Me),259933,False,2001,77,0.481,0.638,0,-5.862,1,0.0276,0.153,0.0,0.154,0.497,79.064,pop -Geri Halliwell,It's Raining Men,254640,False,2001,62,0.637,0.929,5,-6.03,0,0.0447,0.063,0.00796,0.318,0.604,136.482,pop -Blu Cantrell,Hit 'Em Up Style (Oops!),250706,False,2001,71,0.667,0.773,5,-4.983,0,0.0586,0.201,0.0,0.404,0.667,89.976,"pop, R&B" -Britney Spears,I'm a Slave 4 U,203600,False,2001,69,0.847,0.843,5,-3.579,0,0.106,0.415,0.000134,0.107,0.963,110.027,pop -Kylie Minogue,In Your Eyes,197826,False,2001,62,0.689,0.894,6,-6.342,0,0.0672,0.133,4.72e-05,0.0681,0.709,123.971,"pop, Dance/Electronic" -Missy Elliott,One Minute Man (feat. Ludacris),252986,True,2001,57,0.622,0.669,9,-8.419,1,0.329,0.0266,2.97e-06,0.152,0.57,93.839,"hip hop, pop, R&B" -Mary J. 
Blige,Family Affair,265866,False,2001,76,0.911,0.551,8,-3.75,0,0.0449,0.132,4.12e-05,0.0863,0.969,92.887,"pop, R&B" -Faithless,We Come 1 - Radio Edit,222435,False,2015,53,0.645,0.903,5,-10.587,0,0.0441,0.00188,0.799,0.147,0.61,135.977,"pop, Dance/Electronic" -Limp Bizkit,Rollin' (Air Raid Vehicle),213760,True,2000,73,0.603,0.933,1,-3.358,1,0.171,0.00591,0.0,0.206,0.709,96.306,metal -Lasgo,Something,220973,False,2001,65,0.643,0.981,7,-6.644,0,0.0439,0.0271,8.93e-05,0.11,0.38,140.01,pop -iio,Rapture (feat.Nadia Ali),253586,False,2006,54,0.661,0.855,8,-8.403,1,0.0377,0.0722,0.0185,0.199,0.601,123.943,Dance/Electronic -Emma Bunton,What Took You So Long?,241000,False,2001,54,0.668,0.772,9,-5.4,0,0.0307,0.123,0.0,0.341,0.911,118.011,pop -112,It's Over Now,264933,False,2001,57,0.66,0.71,1,-4.541,1,0.0409,0.0106,7.01e-06,0.0736,0.233,97.988,"hip hop, pop, R&B" -Blue,All Rise,223546,False,2001,63,0.721,0.737,5,-2.734,0,0.0324,0.121,0.0,0.165,0.931,97.996,pop -Jessica Simpson,Irresistible,194026,False,2001,43,0.657,0.965,8,-2.771,0,0.0556,0.0285,8.84e-05,0.0552,0.669,93.013,"pop, R&B" -Crazy Town,Butterfly,216733,False,1999,71,0.736,0.811,9,-4.17,0,0.081,0.00132,0.000142,0.107,0.609,103.502,"rock, metal" -Michael Jackson,You Rock My World,337733,False,2001,64,0.854,0.673,4,-3.132,0,0.185,0.038,0.000227,0.255,0.955,95.0,"pop, R&B" -Eve,Let Me Blow Ya Mind,230133,True,2001,73,0.908,0.557,8,-4.243,0,0.107,0.242,0.0,0.0709,0.897,90.032,"hip hop, pop, R&B" -Jennifer Lopez,Ain't It Funny,246160,False,2001,0,0.707,0.869,5,-4.525,0,0.0481,0.104,0.000121,0.0813,0.621,99.825,"hip hop, pop, R&B" -Brandy,Another Day in Paradise - R&B-Version,271626,False,2002,50,0.7,0.787,6,-5.176,0,0.0327,0.00666,3.68e-05,0.0724,0.556,102.043,"hip hop, pop, R&B" -Nickelback,How You Remind Me,223840,False,2001,78,0.446,0.764,10,-5.042,1,0.033,0.00135,0.0,0.099,0.543,172.094,"rock, metal" -Daft Punk,One More Time,320357,False,2001,76,0.613,0.697,2,-8.618,1,0.133,0.0194,0.0,0.332,0.476,122.746,"hip hop, 
Dance/Electronic" -Outkast,Ms. Jackson,270506,True,2000,82,0.843,0.806,4,-5.946,0,0.269,0.143,0.0,0.0771,0.613,94.948,"hip hop, pop" -Fragma,Everytime You Need Me - Radio Version,213346,False,2001,50,0.682,0.917,11,-5.459,0,0.0318,0.15,0.0676,0.34,0.79,137.029,"pop, Dance/Electronic" -Mariah Carey,Loverboy,229173,False,2001,42,0.721,0.79,1,-4.125,1,0.124,0.183,0.0,0.1,0.821,103.141,"pop, R&B" -Dido,Thank You,218360,False,1999,73,0.725,0.583,1,-9.942,0,0.0427,0.3,0.000238,0.0665,0.762,79.984,pop -Joe,Stutter (feat. Mystikal) - Double Take Remix,213026,False,2000,57,0.767,0.759,6,-6.516,1,0.117,0.0513,0.0,0.31,0.677,89.989,"pop, R&B" -P.O.D.,Youth of the Nation,256240,False,2001,69,0.563,0.86,8,-7.533,1,0.0621,0.00834,0.0106,0.39,0.517,97.867,"rock, metal" -Jennifer Lopez,Play,211493,True,2001,57,0.775,0.729,1,-4.229,0,0.162,0.0303,0.00247,0.0361,0.895,104.719,"hip hop, pop, R&B" -Missy Elliott,Get Ur Freak On,211120,True,2001,68,0.797,0.75,0,-9.369,1,0.247,0.533,0.108,0.095,0.74,177.87,"hip hop, pop, R&B" -Ricky Martin,Nobody Wants to Be Lonely (with Christina Aguilera),252706,False,2008,52,0.635,0.854,10,-5.02,0,0.0612,0.00579,0.0083,0.0623,0.59,100.851,"pop, latin" -Christina Milian,AM To PM,231213,False,2001,60,0.872,0.868,10,-3.036,0,0.12,0.153,2.26e-05,0.843,0.822,105.005,"hip hop, pop, R&B" -Roger Sanchez,Another Chance,452906,False,2000,50,0.61,0.82,3,-10.029,1,0.0312,0.000211,0.146,0.138,0.45,127.993,Dance/Electronic -Gorillaz,Clint Eastwood,340920,True,2001,74,0.663,0.694,10,-8.627,0,0.171,0.0253,0.0,0.0698,0.525,167.953,hip hop -2Pac,Until The End Of Time,266506,True,2001,57,0.757,0.706,8,-6.665,1,0.203,0.0267,0.0,0.197,0.375,96.976,hip hop -Tamia,Stranger in My House,285386,False,2000,48,0.676,0.601,0,-4.905,1,0.0297,0.103,0.0,0.147,0.206,119.94,"pop, R&B" -Mary J. 
Blige,No More Drama,326240,False,2001,61,0.653,0.837,6,-6.818,0,0.124,0.534,7.12e-05,0.0784,0.64,97.914,"pop, R&B" -Jennifer Lopez,Love Don't Cost a Thing,221226,False,2001,67,0.786,0.842,4,-5.115,0,0.0707,0.00305,3.54e-06,0.473,0.685,97.577,"hip hop, pop, R&B" -Jamiroquai,Little L,295400,False,2001,65,0.878,0.724,10,-5.373,0,0.129,0.168,0.0116,0.133,0.904,121.906,pop -Case,Missing You,284666,False,2001,55,0.612,0.579,1,-6.417,0,0.0553,0.285,0.0,0.0556,0.716,86.31,"hip hop, pop, R&B" -112,Peaches & Cream,193093,False,2001,63,0.677,0.52,4,-6.255,0,0.334,0.00277,0.0,0.0532,0.768,203.862,"hip hop, pop, R&B" -Five,Let's Dance - Radio Edit,218626,False,2001,47,0.631,0.821,4,-7.853,0,0.0867,0.00876,0.000247,0.293,0.547,118.007,pop -Rui Da Silva,Touch Me (Radio Edit) [feat. Cassandra],213133,False,2001,54,0.464,0.922,0,-8.399,1,0.0638,0.124,0.227,0.0546,0.695,129.078,Dance/Electronic -Alien Ant Farm,Smooth Criminal,209266,False,2001,75,0.653,0.964,9,-4.261,0,0.0582,0.00316,0.00512,0.144,0.87,126.928,"rock, metal" -Erick Sermon,Music (feat. Marvin Gaye),223133,True,2001,57,0.897,0.466,10,-9.053,0,0.203,0.187,1.35e-06,0.0757,0.884,100.01,"hip hop, pop" -D12,Purple Pills,304506,True,2001,68,0.78,0.634,1,-5.941,1,0.16,0.0199,2.98e-05,0.29,0.754,125.25,"hip hop, pop, rock" -Usher,Pop Ya Collar - Radio Edit,210813,False,2016,26,0.888,0.8,5,-3.944,0,0.0946,0.0901,2.12e-05,0.229,0.866,106.957,"hip hop, pop, R&B" -Madonna,Don't Tell Me,280973,False,2000,53,0.699,0.618,7,-7.338,1,0.0594,0.0502,0.000925,0.0914,0.679,99.965,pop -Blue,If You Come Back,207560,False,2001,58,0.582,0.707,10,-4.487,1,0.0319,0.0822,0.0,0.0467,0.701,78.375,pop -Atomic Kitten,Eternal Flame - Single Version,195506,False,2001,58,0.578,0.581,0,-6.867,1,0.0318,0.0658,0.0042,0.0674,0.408,83.293,pop -Jagged Edge,Where the Party At (feat. 
Nelly),232573,False,2001,67,0.596,0.661,5,-6.239,0,0.226,0.31,0.0,0.0847,0.86,129.491,"hip hop, pop, R&B" -Afroman,Because I Got High,197760,True,2001,68,0.802,0.341,7,-8.56,1,0.488,0.169,0.0,0.0783,0.849,166.01,hip hop -S Club 7,Don't Stop Movin',233626,False,2001,63,0.822,0.672,7,-6.133,1,0.0329,0.0285,0.0,0.213,0.91,117.033,pop -Craig David,Fill Me In,257200,False,2000,60,0.682,0.744,8,-6.981,1,0.0365,0.376,0.00951,0.06,0.827,132.493,"hip hop, pop, R&B" -Nelly Furtado,Turn Off The Light,276106,False,2000,1,0.587,0.679,2,-6.26,1,0.0927,0.0839,0.000316,0.413,0.65,180.184,"hip hop, pop, latin" -The Supermen Lovers,Starlight - Radio Edit,234400,False,2001,61,0.739,0.679,0,-5.079,1,0.0373,0.000424,0.0187,0.389,0.786,127.487,Dance/Electronic -Mis-Teeq,All I Want - Sunship Radio Edit,207400,False,2004,46,0.795,0.919,9,-3.07,0,0.0835,0.37,0.00863,0.189,0.96,134.079,"pop, Dance/Electronic" -Jagged Edge,Promise,246720,False,2000,56,0.751,0.568,5,-5.431,0,0.0872,0.136,0.0,0.0288,0.646,128.002,"hip hop, pop, R&B" -S Club 7,Have You Ever,201533,False,2001,60,0.585,0.702,0,-5.734,1,0.0325,0.538,0.0,0.216,0.51,139.909,pop -Enya,Only Time,218546,False,2000,69,0.418,0.249,3,-13.744,1,0.0301,0.841,0.661,0.112,0.213,82.803,"World/Traditional, Folk/Acoustic" -Faith Hill,There You'll Be,222120,False,2001,60,0.321,0.511,8,-7.77,1,0.0306,0.513,0.0,0.127,0.17,128.98,"pop, country" -Ronan Keating,Lovin' Each Day,212973,False,2000,58,0.541,0.899,1,-6.261,1,0.0633,0.0178,2.95e-06,0.286,0.829,106.676,"pop, rock" -Destiny's Child,Bootylicious,207906,False,2001,64,0.84,0.835,1,-4.386,0,0.275,0.00281,1.11e-06,0.152,0.637,103.376,"pop, R&B" -Steps,It's the Way You Make Me Feel,197360,False,2000,53,0.573,0.665,8,-5.081,1,0.0239,0.108,2.09e-06,0.095,0.347,105.006,"pop, Dance/Electronic" -Sunshine Anderson,Heard It All Before,295826,False,2001,56,0.697,0.925,3,-4.209,0,0.216,0.0614,0.0,0.095,0.678,96.951,"pop, R&B" -Lenny 
Kravitz,Again,231666,False,2000,68,0.55,0.804,2,-5.218,1,0.0271,0.0148,4.33e-05,0.105,0.789,79.166,"rock, pop" -Daniel Bedingfield,Gotta Get Thru This - D'N'D Radio Edit,162333,False,2002,46,0.836,0.762,7,-5.044,0,0.0598,0.0826,5.48e-05,0.102,0.941,133.592,pop -DB Boulevard,Point Of View - Radio Edit,231166,False,2018,0,0.676,0.715,6,-6.854,1,0.0287,0.00284,0.0746,0.0685,0.275,129.006,Dance/Electronic -Janet Jackson,All For You,329933,False,2001,65,0.753,0.934,2,-3.011,1,0.0736,0.0174,0.065,0.128,0.73,113.525,"pop, R&B" -Atomic Kitten,Whole Again,185013,False,2001,66,0.747,0.706,4,-4.653,1,0.0413,0.0844,0.00355,0.174,0.567,94.019,pop -Ja Rule,Livin' It Up,257066,False,2001,64,0.874,0.768,6,-4.086,1,0.311,0.0554,0.0,0.041,0.636,106.095,"hip hop, pop, R&B" -Destiny's Child,"Independent Women, Pt. 1",221133,False,2001,65,0.73,0.602,6,-3.782,0,0.206,0.362,3.69e-06,0.169,0.927,97.954,"pop, R&B" -DJ Pied Piper & The Masters Of Ceremonies,Do You Really Like It? - Radio Edit,217120,False,2001,54,0.847,0.877,2,-5.424,1,0.0493,0.124,0.0,0.148,0.764,131.044,Dance/Electronic -Jennifer Lopez,I'm Real (feat. 
Ja Rule) - Murder Remix,262133,True,2001,66,0.708,0.587,11,-7.93,0,0.151,0.273,0.0,0.0718,0.554,83.46,"hip hop, pop, R&B" -Musiq Soulchild,Love,304666,False,2000,0,0.569,0.385,1,-9.919,0,0.0499,0.342,0.0,0.0876,0.339,99.738,"pop, R&B" -So Solid Crew,21 Seconds,302826,True,2001,51,0.607,0.637,1,-11.072,1,0.241,0.0191,0.0,0.124,0.818,137.03,Dance/Electronic -Robbie Williams,Eternity,302760,False,2001,48,0.469,0.316,10,-8.106,1,0.0265,0.504,6.04e-06,0.0919,0.199,77.967,"pop, rock" -Basement Jaxx,Romeo,217493,False,2001,0,0.713,0.829,2,-4.171,1,0.0491,0.00769,0.00203,0.139,0.844,126.853,"pop, Dance/Electronic" -Ludacris,Southern Hospitality (Featuring Pharrell),300933,True,2000,54,0.623,0.896,0,-4.719,1,0.247,0.035,0.0,0.0941,0.613,95.158,"hip hop, pop" -Gabrielle,Rise,219093,False,2001,60,0.558,0.481,8,-9.487,1,0.026,0.315,8.83e-06,0.09,0.631,144.673,"pop, R&B" -Nelly,Ride Wit Me,291781,True,2000,75,0.85,0.7,7,-6.49,1,0.0478,0.0616,1.8e-06,0.244,0.722,101.875,"hip hop, pop, R&B" -Trick Daddy,I'm a Thug,254400,True,2001,59,0.933,0.561,11,-5.961,1,0.122,0.0275,0.0,0.102,0.586,139.976,"hip hop, pop" -Travis,Sing,228800,False,2001,68,0.33,0.905,4,-5.047,1,0.0497,0.000233,8.14e-06,0.0712,0.259,163.142,"rock, pop" -JAY-Z,Izzo (H.O.V.A.),240626,True,2001,63,0.618,0.844,1,-4.051,0,0.342,0.0178,0.000126,0.0634,0.697,84.411,hip hop -Gabrielle,Out Of Reach,196986,False,2001,69,0.51,0.48,11,-6.567,1,0.0452,0.503,0.0,0.106,0.496,182.862,"pop, R&B" -OPM,Heaven Is a Halfpipe (If I Die),257426,True,2000,56,0.743,0.894,8,-6.886,1,0.0349,0.0755,0.00283,0.367,0.77,95.9,rock -K-Ci & JoJo,Crazy,262773,False,2019,30,0.68,0.644,0,-4.507,1,0.0258,0.084,0.0,0.549,0.484,116.097,"pop, R&B" -Staind,It's Been Awhile,264706,True,2001,64,0.509,0.774,6,-4.054,1,0.0338,0.00189,0.000549,0.143,0.0824,116.529,"rock, metal" -3LW,No More (Baby I'ma Do Right),263440,False,2000,56,0.721,0.723,2,-7.08,0,0.0631,0.102,4.4e-06,0.0651,0.761,88.933,"pop, R&B" 
-Ginuwine,Differences,265533,False,2001,65,0.562,0.594,3,-4.578,0,0.0558,0.318,0.0,0.311,0.423,62.876,"hip hop, pop, R&B" -*NSYNC,Gone,292000,False,2001,45,0.699,0.403,11,-8.564,0,0.0614,0.421,0.0,0.126,0.5,113.922,pop -Lifehouse,Hanging By A Moment,216360,False,2000,61,0.537,0.858,1,-4.903,1,0.0349,0.000966,0.0,0.0812,0.502,124.599,"pop, rock, metal" -Nelly,Dilemma,289160,True,2002,77,0.727,0.552,2,-8.074,0,0.14,0.227,0.000164,0.198,0.607,168.189,"hip hop, pop, R&B" -Eminem,Without Me,290320,True,2002,87,0.908,0.669,7,-2.827,1,0.0738,0.00286,0.0,0.237,0.662,112.238,hip hop -Avril Lavigne,Complicated,244506,False,2002,78,0.585,0.776,5,-5.898,1,0.0459,0.0572,7.74e-06,0.3,0.427,77.987,pop -Vanessa Carlton,A Thousand Miles,237493,False,2002,75,0.56,0.825,11,-3.862,1,0.0379,0.323,0.0,0.161,0.268,94.931,"Folk/Acoustic, pop" -The Calling,Wherever You Will Go,208600,False,2001,72,0.558,0.719,2,-5.113,1,0.0267,0.0367,0.0,0.115,0.371,112.027,pop -Shakira,Underneath Your Clothes,224066,False,2001,64,0.616,0.597,8,-5.328,1,0.0415,0.691,0.0,0.104,0.362,165.508,"pop, latin" -No Doubt,Underneath It All,302720,False,2001,27,0.729,0.731,4,-4.822,1,0.0684,0.232,1.57e-06,0.391,0.839,138.202,"rock, pop" -Truth Hurts,Addictive,226440,True,2002,58,0.701,0.677,11,-6.591,0,0.147,0.0737,0.000162,0.187,0.367,99.271,"pop, R&B" -Busta Rhymes,I Know What You Want (feat. Flipmode Squad),324306,True,2002,68,0.648,0.759,6,-4.315,1,0.306,0.0142,0.0,0.648,0.518,85.996,"hip hop, pop" -Eminem,'Till I Collapse,297786,True,2002,85,0.548,0.847,1,-3.237,1,0.186,0.0622,0.0,0.0816,0.1,171.447,hip hop -Diddy,"I Need a Girl (Pt. 2) [feat. 
Loon, Ginuwine & Mario Winans]",285586,False,2004,69,0.713,0.471,1,-7.392,0,0.483,0.421,0.0,0.0308,0.779,199.764,"hip hop, pop" -Christina Milian,When You Look At Me - Radio Edit,222546,False,2001,60,0.955,0.839,3,-3.399,0,0.127,0.0934,0.000175,0.099,0.825,108.955,"hip hop, pop, R&B" -Madonna,Die Another Day,276360,False,2003,53,0.792,0.797,0,-5.166,0,0.0901,0.062,0.00325,0.0896,0.491,123.993,pop -Jennifer Lopez,I'm Gonna Be Alright (feat. Nas),172240,False,2002,53,0.718,0.69,10,-4.382,0,0.265,0.106,0.0,0.271,0.774,93.401,"hip hop, pop, R&B" -Angie Martinez,If I Could Go! (feat. Lil' Mo & Sacario),244466,False,2019,40,0.583,0.643,9,-7.486,0,0.355,0.171,0.0,0.0395,0.7,195.685,pop -Linkin Park,In the End,216880,False,2000,83,0.556,0.864,3,-5.87,0,0.0584,0.00958,0.0,0.209,0.4,105.143,"rock, metal" -Las Ketchup,The Ketchup Song (Aserejé) - Spanglish Version,213973,False,2002,66,0.607,0.923,1,-6.777,1,0.0948,0.0193,1.1e-06,0.0924,0.868,184.819,set() -Aaliyah,Rock The Boat,275026,False,2019,0,0.641,0.72,5,-5.209,1,0.0336,0.0688,0.00286,0.193,0.418,92.988,"hip hop, pop, R&B" -Holly Valance,Kiss Kiss,204400,False,2002,54,0.705,0.717,4,-4.944,0,0.125,0.00369,0.461,0.0701,0.554,97.036,"pop, Dance/Electronic" -Sugababes,Round Round,236426,False,2002,59,0.74,0.845,6,-3.802,0,0.0338,0.00287,6.23e-06,0.115,0.749,126.607,"pop, R&B" -Marilyn Manson,Tainted Love,200426,False,2003,60,0.622,0.876,8,-7.779,1,0.0728,0.00107,1.83e-06,0.292,0.537,142.017,"rock, metal" -Brandy,What About Us?,253200,False,2002,43,0.686,0.698,5,-2.942,0,0.223,0.263,0.0,0.39,0.748,93.118,"hip hop, pop, R&B" -Chad Kroeger,Hero (feat. 
Josey Scott),200480,False,2003,66,0.427,0.843,4,-4.54,1,0.0364,0.00216,0.0,0.179,0.304,147.387,metal -Britney Spears,Overprotected - Radio Edit,198600,False,2001,61,0.682,0.894,0,-1.73,0,0.0727,0.0381,0.0,0.416,0.845,95.992,pop -Scooter,The Logical Song,234116,False,2013,46,0.527,0.933,6,-6.277,0,0.0382,0.00112,0.0362,0.425,0.493,144.043,"pop, Dance/Electronic" -Missy Elliott,4 My People (feat. Eve),289373,True,2001,49,0.969,0.701,1,-7.503,1,0.156,0.14,0.00161,0.201,0.905,121.392,"hip hop, pop, R&B" -Aaliyah,More Than A Woman,230346,False,2002,42,0.646,0.638,11,-5.803,1,0.0787,0.0556,0.0014,0.182,0.74,86.994,"hip hop, pop, R&B" -Brandy,Full Moon,248933,False,2002,52,0.611,0.654,10,-4.823,0,0.139,0.372,0.00432,0.0926,0.609,105.922,"hip hop, pop, R&B" -Blue,One Love,207186,False,2002,60,0.683,0.722,8,-4.039,0,0.0358,0.0243,4.2e-05,0.35,0.854,95.012,pop -Busta Rhymes,Pass The Courvoisier Part II (feat. P. Diddy & Pharrell) - Remix,238600,True,2001,55,0.697,0.793,6,-4.699,0,0.323,0.0844,0.0,0.0619,0.56,89.767,"hip hop, pop" -Céline Dion,A New Day Has Come - Radio Remix,259773,False,2001,59,0.574,0.691,6,-5.103,1,0.035,0.0826,1.65e-05,0.149,0.195,91.969,pop -Scooter,Nessaja,208449,False,2013,47,0.486,0.904,2,-6.642,1,0.0364,0.000748,0.0984,0.187,0.118,143.048,"pop, Dance/Electronic" -Eve,Gangsta Lovin',239266,True,2002,58,0.723,0.84,1,-3.523,0,0.0608,0.0619,0.0,0.0945,0.827,94.332,"hip hop, pop, R&B" -Disturbing Tha Peace,Move Bitch,272293,True,2002,59,0.777,0.751,1,-5.692,1,0.133,0.245,0.0,0.1,0.191,177.894,"hip hop, pop" -Khia,"My Neck, My Back (Lick It)",222560,True,2015,59,0.859,0.625,9,-4.234,0,0.13,0.0296,0.0,0.0537,0.71,102.072,"pop, R&B" -Tweet,Oops (Oh My) [feat. 
Missy Elliott],237800,True,2002,64,0.66,0.536,10,-8.786,0,0.29,0.233,0.00669,0.111,0.775,159.963,"hip hop, pop, R&B" -Kylie Minogue,Spinning Around,207866,False,2000,55,0.761,0.662,6,-7.645,0,0.0548,0.292,6.19e-05,0.0956,0.631,120.043,"pop, Dance/Electronic" -Nelly,Hot In Herre,228240,True,2002,75,0.956,0.745,11,-4.753,0,0.12,0.206,0.0,0.0615,0.912,107.075,"hip hop, pop, R&B" -Gareth Gates,Anyone of Us (Stupid Mistake),227866,False,2003,53,0.7,0.72,1,-6.061,1,0.0261,0.285,0.0,0.0845,0.654,106.986,pop -Christina Aguilera,Dirrty (feat. Redman),298853,False,2002,68,0.64,0.889,2,-3.073,1,0.322,0.107,0.0,0.339,0.436,99.931,pop -B2K,Uh Huh,223293,False,2002,49,0.855,0.681,2,-4.955,1,0.0688,0.0881,3.7e-06,0.0592,0.908,99.699,"hip hop, pop, R&B" -Sugababes,Freak Like Me,195866,False,2002,45,0.517,0.919,5,-3.451,1,0.0925,0.0996,1.81e-06,0.557,0.387,91.868,"pop, R&B" -Dirty Vegas,Days Go By,432146,False,2002,48,0.786,0.853,9,-8.274,0,0.0688,0.0499,0.0872,0.35,0.309,126.99,Dance/Electronic -Fat Joe,What's Luv? (feat. 
Ja-Rule & Ashanti),267093,True,2001,73,0.835,0.707,8,-5.074,1,0.0598,0.0219,2.19e-05,0.108,0.919,93.955,"hip hop, pop" -Liberty X,Just A Little,237359,False,2019,43,0.786,0.614,5,-6.554,0,0.0574,0.00616,0.0,0.049,0.742,103.887,pop -No Doubt,Hella Good,242586,False,2001,26,0.771,0.665,8,-3.954,1,0.035,0.0135,0.0123,0.0903,0.844,115.142,"rock, pop" -LL Cool J,Luv U Better,287000,False,2002,2,0.668,0.806,6,-3.9,1,0.239,0.232,0.0,0.18,0.721,95.022,"hip hop, pop, R&B" -Red Hot Chili Peppers,Can't Stop,269000,False,2002,80,0.618,0.938,9,-3.442,1,0.0456,0.0179,0.0,0.167,0.875,91.455,rock -Musiq Soulchild,Halfcrazy,254493,False,2002,58,0.412,0.77,11,-6.057,0,0.18,0.462,1.5e-06,0.0485,0.418,101.955,"pop, R&B" -Sean Paul,Gimme the Light,228000,True,2002,59,0.778,0.761,2,-5.529,1,0.0525,0.141,4.61e-05,0.0441,0.923,107.288,"hip hop, pop" -Faith Evans,I Love You,267160,False,2001,53,0.489,0.757,11,-4.062,0,0.0926,0.00981,0.0,0.234,0.567,168.004,"hip hop, pop, R&B" -Charli Baltimore,Down 4 U,318213,True,2002,47,0.64,0.59,7,-7.442,1,0.32,0.252,0.0,0.665,0.519,83.133,pop -Kylie Minogue,Love at First Sight,238266,False,2001,55,0.603,0.774,3,-6.066,0,0.0428,0.0288,0.051,0.0533,0.48,124.994,"pop, Dance/Electronic" -N.O.R.E.,Nothin',264653,True,2002,58,0.791,0.863,3,-6.118,0,0.151,0.317,1.13e-06,0.27,0.742,97.035,"hip hop, pop" -Gareth Gates,Unchained Melody,233666,False,2003,35,0.368,0.426,10,-8.049,1,0.0277,0.255,5.39e-06,0.108,0.208,106.52,pop -Diddy,I Need a Girl (Pt. 1) [feat. 
Usher & Loon],268800,False,2005,63,0.66,0.707,6,-5.758,1,0.208,0.397,0.0,0.211,0.761,89.279,"hip hop, pop" -Big Brovaz,Nu Flow,201480,False,2001,45,0.783,0.726,7,-8.718,1,0.106,0.0903,2.53e-06,0.363,0.784,148.062,pop -No Doubt,Hey Baby,207040,False,2001,24,0.705,0.872,11,-3.557,0,0.125,0.0602,0.0,0.228,0.746,93.63,"rock, pop" -Ashanti,Baby,265706,True,2002,55,0.567,0.58,4,-5.757,0,0.223,0.0084,5.37e-06,0.0342,0.46,157.596,"hip hop, pop, R&B" -Sophie Ellis-Bextor,Murder On The Dancefloor,230013,False,2002,62,0.734,0.848,1,-5.285,0,0.0309,0.00312,1.16e-05,0.313,0.863,117.31,"pop, Dance/Electronic" -Justin Timberlake,Like I Love You,283626,False,2002,62,0.853,0.811,6,-4.927,0,0.0646,0.0439,0.000307,0.0703,0.9,114.964,pop -Mary J. Blige,Rainy Dayz,276373,False,2001,32,0.412,0.667,6,-6.997,1,0.229,0.195,0.0,0.305,0.533,68.942,"pop, R&B" -A1,Caught in the Middle,206466,False,2001,53,0.519,0.874,5,-5.122,1,0.034,0.0524,0.0,0.243,0.572,96.072,pop -Eminem,Cleanin' Out My Closet,297840,True,2002,71,0.908,0.758,9,-4.753,0,0.174,0.0687,0.0,0.0783,0.87,148.015,hip hop -Ashanti,Happy,262226,True,2002,61,0.727,0.515,2,-6.553,0,0.0289,0.241,3.74e-06,0.124,0.695,89.488,"hip hop, pop, R&B" -Delta Goodrem,Born to Try,251280,False,2003,54,0.542,0.589,8,-6.813,1,0.0284,0.246,0.0,0.188,0.336,133.838,pop -P!nk,Get the Party Started,192533,False,2001,68,0.802,0.903,11,-3.267,0,0.046,0.0011,0.0,0.173,0.96,128.93,pop -Alicia Keys,A Woman's Worth,303333,False,2001,52,0.652,0.41,4,-8.323,0,0.158,0.333,0.0,0.064,0.495,75.09,"pop, R&B" -Daniel Bedingfield,Gotta Get Thru This - D'N'D Radio Edit,161240,False,2002,56,0.838,0.764,7,-5.076,0,0.0586,0.0862,0.000122,0.0906,0.924,133.592,pop -Coldplay,Clocks,307879,False,2002,79,0.577,0.749,5,-7.215,0,0.0279,0.599,0.0115,0.183,0.255,130.97,"rock, pop" -Missy Elliott,Work It,263226,True,2002,70,0.884,0.677,1,-5.603,1,0.283,0.0778,0.0,0.0732,0.584,101.868,"hip hop, pop, R&B" -Ronan Keating,If Tomorrow Never 
Comes,214306,False,2002,60,0.555,0.496,10,-6.136,1,0.028,0.333,0.0,0.0956,0.27,79.01,"pop, rock" -Big Tymers,Still Fly,335613,True,2002,64,0.675,0.607,11,-3.538,1,0.162,0.0302,8.97e-06,0.339,0.56,177.928,"hip hop, pop" -Creed,My Sacrifice,294600,False,2001,69,0.32,0.88,2,-6.035,1,0.0504,0.000101,4.31e-06,0.076,0.257,146.349,"pop, rock, metal" -Ja Rule,Always On Time,245133,True,2001,73,0.839,0.706,5,-6.104,0,0.199,0.208,0.0,0.242,0.839,96.673,"hip hop, pop, R&B" -Ashanti,Foolish,227386,True,2002,70,0.665,0.695,0,-5.763,1,0.0532,0.347,0.0,0.106,0.707,90.119,"hip hop, pop, R&B" -Avril Lavigne,Sk8er Boi,204000,False,2002,75,0.487,0.9,0,-4.417,1,0.0482,6.79e-05,0.0,0.358,0.484,149.937,pop -DJ Sammy,Heaven,233600,False,2002,63,0.571,0.953,8,-5.601,1,0.0548,0.0011,0.000606,0.226,0.601,137.965,pop -Britney Spears,"I'm Not a Girl, Not Yet a Woman",231066,False,2001,58,0.534,0.543,3,-6.857,1,0.0245,0.579,0.0,0.112,0.418,78.996,pop -Cam’ron,Oh Boy,204706,True,2002,67,0.754,0.767,6,-5.586,1,0.145,0.0216,0.0,0.172,0.828,83.014,"hip hop, pop" -Red Hot Chili Peppers,By the Way,216933,False,2002,73,0.451,0.97,0,-4.938,1,0.107,0.0264,0.00355,0.102,0.198,122.444,rock -Elvis Presley,A Little Less Conversation - JXL Radio Edit Remix,211506,False,2002,60,0.597,0.97,4,-5.972,0,0.0502,0.000385,0.205,0.133,0.717,114.999,"rock, easy listening" -Atomic Kitten,The Tide Is High - Radio Mix,206093,False,2002,65,0.783,0.649,7,-4.127,1,0.0322,0.0209,0.00482,0.067,0.665,103.997,pop -*NSYNC,Girlfriend,253600,False,2001,50,0.745,0.807,0,-5.191,0,0.0884,0.0887,1.49e-05,0.0283,0.858,93.967,pop -Robbie Williams,Somethin' Stupid,170493,False,2001,64,0.654,0.515,0,-12.185,1,0.0261,0.429,5.62e-06,0.174,0.677,106.191,"pop, rock" -Alanis Morissette,Hands Clean,269400,False,2002,57,0.513,0.82,7,-5.428,1,0.0299,0.00192,2.83e-06,0.504,0.52,99.952,"pop, Folk/Acoustic" -Usher,U Don't Have to Call,269400,True,2001,59,0.793,0.568,1,-4.958,1,0.0459,0.0478,3.57e-05,0.0304,0.806,100.005,"hip hop, pop, R&B" 
-Creed,One Last Breath - Radio Version,242000,False,2015,47,0.38,0.725,2,-6.094,1,0.0365,0.00571,0.00015,0.3,0.202,130.581,"pop, rock, metal" -The Goo Goo Dolls,Here Is Gone,238173,False,2002,56,0.338,0.685,1,-5.178,1,0.0322,0.00052,0.000512,0.109,0.221,102.192,"pop, rock" -Puddle Of Mudd,She Hates Me,216760,True,2001,70,0.58,0.748,4,-5.433,1,0.0323,0.00935,0.0,0.753,0.584,109.781,"rock, metal" -Ludacris,Rollout (My Business),296586,True,2001,60,0.921,0.668,7,-8.73,1,0.235,0.0949,0.0,0.592,0.892,131.059,"hip hop, pop" -Blazin' Squad,Crossroads - Radio Edit,188693,False,2012,40,0.661,0.746,4,-5.153,1,0.042,0.109,0.0,0.325,0.502,144.188,set() -Default,Wasting My Time,268693,False,2001,40,0.443,0.769,2,-5.529,1,0.0312,0.00138,0.0,0.0677,0.17,147.973,"rock, pop, metal" -Michelle Branch,All You Wanted,217680,False,2001,60,0.499,0.72,8,-9.101,1,0.0286,0.00466,0.00303,0.122,0.652,96.103,"pop, Folk/Acoustic" -Daniel Bedingfield,If You're Not The One,257026,False,2002,58,0.688,0.538,10,-7.608,1,0.0292,0.504,0.0,0.111,0.27,119.998,pop -Enrique Iglesias,Escape,208626,False,2001,56,0.776,0.844,11,-5.305,1,0.0297,0.0277,0.000244,0.135,0.868,125.972,"pop, latin" -Puddle Of Mudd,Blurry,303920,False,2001,64,0.432,0.936,3,-4.537,0,0.0562,0.00691,0.0,0.14,0.499,157.469,"rock, metal" -Coldplay,In My Place,226680,False,2002,72,0.424,0.588,9,-5.455,1,0.0278,0.0553,4.95e-06,0.298,0.193,144.636,"rock, pop" -Outkast,The Whole World (feat. 
Killer Mike),295346,True,2001,51,0.814,0.852,2,-6.176,1,0.401,0.0958,1.32e-06,0.0288,0.717,184.682,"hip hop, pop" -P!nk,Don't Let Me Get Me - Radio Edit,210693,False,2001,60,0.624,0.85,0,-4.754,0,0.0756,0.00234,0.0,0.0621,0.615,98.525,pop -50 Cent,In Da Club,193466,True,2003,81,0.899,0.713,6,-2.752,0,0.366,0.255,0.0,0.0708,0.777,90.051,"hip hop, pop" -Sean Paul,Get Busy,211666,False,2002,74,0.735,0.824,10,-4.143,0,0.036,0.615,0.0,0.158,0.726,100.202,"hip hop, pop" -Eminem,"Lose Yourself - From ""8 Mile"" Soundtrack",322226,True,2004,77,0.686,0.735,2,-4.616,1,0.264,0.00921,0.00066,0.342,0.0596,171.355,hip hop -Beyoncé,Crazy In Love (feat. Jay-Z),236133,False,2003,76,0.646,0.77,2,-6.596,0,0.226,0.00249,0.0,0.0715,0.681,99.165,"pop, R&B" -Counting Crows,Big Yellow Taxi,225426,False,2003,50,0.669,0.873,8,-4.315,1,0.0494,0.00204,0.0,0.137,0.827,88.029,"rock, pop" -Black Eyed Peas,Where Is The Love?,272533,False,2003,68,0.835,0.687,5,-3.18,1,0.184,0.101,0.0,0.132,0.828,94.059,"hip hop, pop" -Jennifer Lopez,Jenny from the Block (feat. Jadakiss & Styles P.) - Track Masters Remix,187840,False,2002,70,0.845,0.768,6,-5.448,1,0.188,0.00733,5.04e-06,0.0575,0.96,100.0,"hip hop, pop, R&B" -Black Eyed Peas,Shut Up,296186,False,2003,56,0.81,0.714,2,-3.966,1,0.243,0.0637,0.0,0.237,0.561,112.968,"hip hop, pop" -t.A.T.u.,All The Things She Said,214440,True,2020,39,0.527,0.834,5,-5.767,0,0.0474,0.0411,0.00599,0.105,0.381,179.92,pop -Benny Benassi,Satisfaction (Isak Original Extended) - Benny Benassi Presents The Biz,285570,False,2003,66,0.793,0.698,11,-3.626,1,0.104,0.163,0.145,0.0745,0.339,130.017,"pop, Dance/Electronic" -Busta Rhymes,I Know What You Want (feat. Flipmode Squad),324306,True,2002,68,0.648,0.759,6,-4.315,1,0.306,0.0142,0.0,0.648,0.518,85.996,"hip hop, pop" -Panjabi MC,Mundian to Bach Ke,244666,False,2003,61,0.778,0.879,8,-4.951,0,0.0371,0.399,0.792,0.15,0.942,98.077,"World/Traditional, hip hop" -Mis-Teeq,Scandalous - U.S. 
Radio Edit,238840,False,2004,59,0.734,0.668,0,-3.935,0,0.0349,0.0526,0.0,0.0925,0.651,99.971,"pop, Dance/Electronic" -JAY-Z,03' Bonnie & Clyde,205560,True,2002,71,0.759,0.678,9,-5.148,0,0.314,0.23,0.0,0.15,0.327,89.64,hip hop -50 Cent,P.I.M.P.,249480,True,2003,76,0.712,0.772,10,-3.024,0,0.346,0.0521,4.35e-06,0.0368,0.848,84.722,"hip hop, pop" -Jamelia,Superstar,215480,False,2003,69,0.801,0.645,1,-6.93,1,0.0356,0.0457,0.0,0.0357,0.824,110.01,pop -Linkin Park,Faint,162600,False,2003,72,0.554,0.978,4,-3.554,1,0.131,0.111,0.0,0.0731,0.594,135.095,"rock, metal" -DMX,X Gon' Give It To Ya,217586,True,2007,70,0.761,0.899,10,-3.09,0,0.183,0.0135,0.0,0.0719,0.673,95.027,"hip hop, pop" -Evanescence,Bring Me To Life,235893,False,2003,79,0.331,0.943,4,-3.188,0,0.0698,0.00721,2.06e-06,0.242,0.296,94.612,metal -Sugababes,Hole In The Head,218173,False,2003,57,0.785,0.933,6,-4.629,0,0.0309,0.0303,0.0,0.137,0.962,125.011,"pop, R&B" -Craig David,Rise & Fall (feat. Sting),287226,False,2002,59,0.405,0.589,6,-7.51,0,0.0586,0.252,0.0,0.661,0.808,83.373,"hip hop, pop, R&B" -Three Days Grace,I Hate Everything About You,231480,False,2003,72,0.498,0.83,6,-5.157,0,0.0421,0.00461,0.0,0.139,0.453,89.342,"rock, metal" -Lil Jon & The East Side Boyz,Get Low,324600,False,2002,47,0.78,0.604,1,-8.019,1,0.0396,0.022,0.0,0.194,0.12,101.043,"hip hop, pop" -B2K,"Bump, Bump, Bump (feat. P. Diddy)",282773,False,2003,62,0.835,0.68,1,-6.02,0,0.21,0.1,0.0,0.049,0.889,95.508,"hip hop, pop, R&B" -Baby Bash,Suga Suga,239026,False,2003,73,0.662,0.748,5,-3.041,0,0.268,0.688,8.43e-06,0.0841,0.535,82.331,"hip hop, pop, R&B, latin" -Aaliyah,Miss You - Main,245240,False,2002,36,0.602,0.34,10,-9.867,1,0.0375,0.618,0.0,0.234,0.511,109.598,"hip hop, pop, R&B" -Britney Spears,Me Against the Music (feat. 
Madonna) - LP Version / Video Mix,223773,False,2003,59,0.804,0.836,6,-6.635,0,0.089,0.32,0.0,0.213,0.85,120.046,pop -Blue,Sorry Seems To Be The Hardest Word,210066,False,2004,57,0.564,0.6,7,-6.814,0,0.0305,0.183,2.19e-06,0.38,0.442,78.955,pop -Delta Goodrem,Lost Without You,248546,False,2003,54,0.581,0.747,7,-6.682,1,0.0283,0.0365,1.69e-06,0.193,0.514,146.301,pop -Justin Timberlake,Rock Your Body,267266,False,2002,76,0.892,0.714,4,-6.055,0,0.141,0.201,0.000234,0.0521,0.817,100.972,pop -The White Stripes,Seven Nation Army,231920,False,2003,3,0.741,0.469,4,-7.627,0,0.0805,0.00601,0.447,0.306,0.313,123.904,"rock, blues" -Youngbloodz,Damn! (feat. Lil' Jon) - Club Mix,298600,True,2003,54,0.772,0.724,6,-5.799,0,0.307,0.0149,0.0,0.355,0.518,84.038,"hip hop, pop" -Beyoncé,Baby Boy (feat. Sean Paul),244826,False,2003,63,0.655,0.488,1,-9.17,1,0.22,0.0825,1.16e-06,0.221,0.791,91.025,"pop, R&B" -Lil' Kim,Magic Stick,359973,True,2019,47,0.849,0.498,2,-7.872,1,0.272,0.116,4.49e-05,0.268,0.502,92.98,"hip hop, pop, R&B" -Christina Aguilera,Can't Hold Us Down (feat. 
Lil' Kim),255266,False,2002,61,0.859,0.658,8,-4.481,1,0.192,0.0326,0.00181,0.0651,0.538,98.989,pop -Nickelback,Someday,207466,False,2003,68,0.455,0.858,11,-5.659,0,0.0391,0.000616,0.000247,0.226,0.597,163.118,"rock, metal" -Blue,U Make Me Wanna - Radio Edit,222400,False,2003,48,0.6,0.736,7,-6.228,1,0.0329,0.325,0.0,0.137,0.848,158.108,pop -Ashanti,Rain On Me,297960,False,2003,50,0.606,0.699,8,-3.103,0,0.0337,0.343,1.89e-06,0.0757,0.509,83.701,"hip hop, pop, R&B" -Trapt,Headstrong,285569,True,2002,72,0.492,0.896,4,-5.819,0,0.117,0.000172,2.78e-06,0.189,0.534,184.981,"rock, metal" -Simply Red,Sunrise,199040,False,2008,58,0.791,0.733,5,-5.123,0,0.029,0.00189,0.201,0.0859,0.917,105.987,"rock, R&B, Folk/Acoustic, pop" -Lumidee,"Never Leave You (Uh Oooh, Uh Oooh)",184906,False,2003,61,0.811,0.657,6,-6.197,1,0.362,0.0391,0.712,0.0798,0.777,199.958,"pop, R&B" -Girls Aloud,Sound Of The Underground,221426,False,2003,58,0.568,0.867,11,-5.861,1,0.0365,0.00344,4.67e-05,0.328,0.88,163.944,"pop, Dance/Electronic" -Nelly,Shake Ya Tailfeather (feat. P. Diddy & Murphy Lee) - 2016 Remaster,293666,False,2016,52,0.527,0.808,1,-4.749,1,0.295,0.176,0.0,0.169,0.907,87.025,"hip hop, pop, R&B" -Room 5,Make Luv,212413,False,2003,57,0.883,0.887,1,-4.92,1,0.116,0.0111,0.63,0.0367,0.648,124.814,Dance/Electronic -Fatman Scoop,Be Faithful,164506,True,2009,49,0.649,0.713,7,-6.488,1,0.295,0.000787,0.0,0.318,0.629,101.129,hip hop -Limp Bizkit,Behind Blue Eyes,269973,False,2003,70,0.595,0.489,7,-6.66,1,0.0271,0.497,0.0,0.119,0.0902,120.315,metal -Sugababes,Too Lost In You,237693,False,2003,58,0.586,0.705,1,-4.904,0,0.0287,0.0485,0.0,0.15,0.459,98.014,"pop, R&B" -50 Cent,"Wanksta - From ""8 Mile"" Soundtrack",219400,True,2003,57,0.802,0.863,1,-3.552,1,0.2,0.0199,0.0,0.141,0.873,81.008,"hip hop, pop" -Pharrell Williams,Frontin' (feat. 
Jay-Z) - Club Mix,236506,True,2003,67,0.894,0.404,9,-5.913,0,0.13,0.453,1.57e-06,0.0801,0.759,102.009,"hip hop, pop" -Chingy,Right Thurr,250746,True,2003,69,0.866,0.749,2,-3.977,1,0.253,0.15,0.0,0.0614,0.891,92.074,"hip hop, pop, R&B" -Eminem,Sing For The Moment,339546,True,2002,71,0.668,0.824,5,-4.164,0,0.137,0.0017,3.05e-06,0.0426,0.185,163.961,hip hop -Lil' Kim,The Jump Off (feat. Mr. Cheeks) - Remix,234800,True,2003,48,0.853,0.752,1,-8.831,1,0.339,0.178,4.65e-06,0.0553,0.935,104.946,"hip hop, pop, R&B" -Mýa,My Love Is Like...Wo - Main Mix,209533,False,2003,48,0.831,0.695,4,-6.491,1,0.109,0.194,1.24e-05,0.309,0.722,132.817,"pop, R&B" -50 Cent,21 Questions,224440,True,2003,72,0.646,0.813,6,-3.846,0,0.299,0.349,9.37e-05,0.0427,0.895,92.729,"hip hop, pop" -Nelly,Air Force Ones,304000,True,2002,61,0.784,0.459,4,-9.74,0,0.317,0.0847,0.0,0.079,0.618,164.062,"hip hop, pop, R&B" -Missy Elliott,Gossip Folks (feat. Ludacris),234893,True,2002,57,0.707,0.538,0,-5.264,1,0.505,0.362,0.0,0.317,0.439,121.732,"hip hop, pop, R&B" -Girls Aloud,Jump,220360,False,2003,60,0.658,0.826,8,-6.031,1,0.0346,0.00189,0.00275,0.0492,0.795,134.465,"pop, Dance/Electronic" -Ultrabeat,Pretty Green Eyes - Radio Edit,201920,False,2003,59,0.553,0.775,1,-10.284,0,0.043,0.212,0.0,0.0728,0.51,139.993,"pop, Dance/Electronic" -Christina Aguilera,Fighter,245960,False,2002,67,0.435,0.92,4,-1.357,0,0.201,0.235,0.000353,0.552,0.45,188.899,pop -Frankie J,Don't Wanna Try,245293,False,2003,49,0.655,0.433,5,-7.577,0,0.0348,0.444,0.0,0.221,0.199,130.127,"hip hop, pop, R&B" -Snoop Dogg,Beautiful,299146,True,2005,67,0.893,0.74,11,-4.936,0,0.132,0.299,0.0,0.0881,0.963,101.025,"hip hop, pop" -Floetry,Say Yes,268373,False,2002,57,0.747,0.398,5,-12.932,0,0.0743,0.211,1.42e-06,0.113,0.428,117.901,"hip hop, pop, R&B" -Ginuwine,In Those Jeans,243306,True,2003,59,0.691,0.541,1,-5.873,1,0.0776,0.508,0.0,0.0753,0.319,127.683,"hip hop, pop, R&B" -Electric Six,Danger! 
High Voltage - Soulchild Radio Mix,214600,False,2003,0,0.66,0.698,11,-4.722,0,0.0302,1.92e-05,0.166,0.0517,0.511,123.005,rock -Rachel Stevens,Sweet Dreams My LA Ex - Radio Edit,208386,False,2003,38,0.85,0.862,2,-3.587,1,0.138,0.0733,0.00144,0.0936,0.908,130.036,"pop, Dance/Electronic" -Ludacris,Stand Up,213760,True,2003,57,0.751,0.84,6,-4.855,1,0.349,0.367,0.0,0.0916,0.801,99.057,"hip hop, pop" -Nas,I Can,253720,False,2002,58,0.837,0.885,6,-3.914,0,0.182,0.103,0.0,0.0666,0.694,95.313,hip hop -3 Doors Down,When I'm Gone,260333,False,2002,67,0.53,0.768,7,-5.611,1,0.0284,0.00385,0.0,0.103,0.374,148.095,"pop, rock, metal" -Tyrese,How You Gonna Act Like That,294693,False,2002,60,0.733,0.521,3,-3.657,0,0.0457,0.107,0.0,0.0692,0.525,112.913,"hip hop, pop, R&B" -Justin Timberlake,Cry Me a River,288333,False,2002,73,0.624,0.654,8,-6.582,0,0.183,0.577,0.0,0.104,0.564,73.898,pop -Westlife,Mandy,199320,False,2003,57,0.447,0.636,5,-5.08,1,0.0278,0.254,9.25e-05,0.11,0.376,105.678,pop -No Doubt,It's My Life,226053,False,2003,60,0.612,0.735,8,-5.074,0,0.0282,0.00202,0.00118,0.328,0.783,126.326,"rock, pop" -Amanda Perez,Angel,218760,False,2003,48,0.638,0.54,0,-6.849,1,0.0473,0.506,0.0,0.157,0.352,143.772,"pop, R&B" -JAY-Z,Excuse Me Miss,281240,True,2002,56,0.714,0.862,6,-5.531,1,0.286,0.0305,0.0,0.0884,0.887,92.849,hip hop -Coldplay,Clocks,307879,False,2002,79,0.577,0.749,5,-7.215,0,0.0279,0.599,0.0115,0.183,0.255,130.97,"rock, pop" -Kelly Rowland,Stole,249293,False,2002,54,0.649,0.718,7,-4.984,0,0.0594,0.00676,4.26e-06,0.174,0.477,79.993,"hip hop, pop, R&B" -Missy Elliott,Work It,263226,True,2002,70,0.884,0.677,1,-5.603,1,0.283,0.0778,0.0,0.0732,0.584,101.868,"hip hop, pop, R&B" -Ashanti,Rock Wit U (Awww Baby),209120,False,2003,65,0.71,0.797,4,-3.006,0,0.0582,0.408,0.0,0.16,0.849,94.998,"hip hop, pop, R&B" -Junior Senior,Move Your Feet,181826,False,2002,67,0.747,0.904,9,-2.623,1,0.0803,0.046,0.106,0.203,0.846,118.877,pop -Audioslave,Like a 
Stone,293960,False,2002,75,0.614,0.568,7,-5.477,0,0.0276,0.00797,0.0,0.0997,0.516,107.849,"rock, metal" -Sean Paul,Like Glue,232506,False,2002,61,0.757,0.78,1,-5.038,1,0.319,0.0811,0.0,0.113,0.59,97.917,"hip hop, pop" -Jaheim,Put That Woman First,245773,True,2002,55,0.649,0.536,5,-8.804,0,0.132,0.492,0.0,0.184,0.513,78.935,"pop, R&B" -Jennifer Lopez,All I Have (feat. LL Cool J),254466,False,2002,60,0.701,0.669,1,-5.265,1,0.107,0.271,0.0,0.158,0.446,83.066,"hip hop, pop, R&B" -Monica,So Gone,242773,False,2003,64,0.618,0.552,8,-6.863,1,0.0981,0.176,0.0,0.252,0.586,81.975,"pop, R&B" -Dido,White Flag,240040,False,2003,44,0.512,0.525,5,-6.823,1,0.0401,0.327,3.3e-06,0.081,0.294,169.951,pop -The Roots,The Seed (2.0),267933,True,2002,63,0.758,0.957,7,-2.281,1,0.0357,0.039,0.0,0.133,0.966,111.121,hip hop -Chingy,Holidae In,314400,True,2003,61,0.81,0.791,7,-5.909,1,0.181,0.0893,0.0,0.0838,0.948,153.067,"hip hop, pop, R&B" -Fabolous,Can't Let You Go (feat. Mike Shorey & Lil' Mo),223973,True,2003,61,0.646,0.6,9,-6.569,1,0.458,0.231,0.0,0.0794,0.811,192.082,"hip hop, pop, R&B" -Kelly Clarkson,Miss Independent,214773,False,2003,47,0.656,0.615,11,-6.359,0,0.137,0.0769,0.000415,0.0706,0.592,175.943,"pop, R&B" -Ja Rule,Mesmerize,278693,True,2002,61,0.769,0.646,1,-6.653,1,0.199,0.0566,0.0,0.406,0.433,90.079,"hip hop, pop, R&B" -Matchbox Twenty,Unwell,228706,False,2002,63,0.256,0.788,6,-5.263,0,0.0403,0.0298,0.0,0.707,0.429,80.718,"pop, rock" -Outkast,The Way You Move (feat. Sleepy Brown),234000,True,2003,66,0.871,0.597,5,-4.932,0,0.0464,0.126,0.000113,0.0638,0.635,125.999,"hip hop, pop" -Fabolous,Into You (feat. 
Tamia),295773,True,2003,67,0.546,0.538,7,-7.886,0,0.0523,0.233,0.0,0.118,0.58,182.12,"hip hop, pop, R&B" -Switchfoot,Meant to Live,201373,False,2003,61,0.397,0.903,2,-4.577,1,0.0457,0.00575,1.6e-06,0.362,0.556,151.551,"rock, pop" -Daniel Bedingfield,If You're Not The One,257026,False,2002,58,0.688,0.538,10,-7.608,1,0.0292,0.504,0.0,0.111,0.27,119.998,pop -Good Charlotte,Lifestyles of the Rich & Famous,190173,False,2002,68,0.62,0.93,1,-3.685,1,0.0374,0.00043,0.0,0.0686,0.609,106.22,"rock, metal" -Wayne Wonder,No Letting Go,202013,False,2003,63,0.496,0.819,5,-5.656,0,0.0444,0.0479,0.000215,0.177,0.317,100.215,"hip hop, R&B" -The Strokes,Reptilia,219826,False,2003,74,0.489,0.649,9,-5.11,1,0.0336,0.000603,0.713,0.101,0.77,158.009,rock -Avril Lavigne,I'm with You,223066,False,2002,70,0.457,0.406,9,-7.462,1,0.0291,0.08,0.0,0.117,0.208,151.95,pop -Will Young,Leave Right Now,214733,False,2003,55,0.641,0.445,6,-8.674,1,0.0368,0.145,0.0,0.108,0.383,81.931,pop -Usher,Yeah! (feat. Lil Jon & Ludacris),250373,False,2004,81,0.894,0.791,2,-4.699,1,0.112,0.0183,0.0,0.0388,0.583,105.018,"hip hop, pop, R&B" -Maroon 5,This Love,206200,False,2002,77,0.712,0.862,5,-4.612,0,0.0378,0.0525,0.0,0.093,0.809,95.051,pop -Britney Spears,Toxic,198800,False,2003,81,0.774,0.838,5,-3.914,0,0.114,0.0249,0.025,0.242,0.924,143.04,pop -Outkast,Hey Ya!,235213,False,2003,80,0.727,0.974,4,-2.261,0,0.0664,0.103,0.000532,0.174,0.965,79.526,"hip hop, pop" -Hoobastank,The Reason,232800,False,2003,79,0.472,0.671,4,-4.649,1,0.029,0.0129,0.0,0.159,0.0681,82.952,"pop, rock, metal" -Anastacia,Left Outside Alone,257426,False,2004,65,0.663,0.746,2,-3.567,0,0.0321,0.0697,0.0,0.0929,0.325,102.847,pop -Daddy Yankee,Gasolina,192600,False,2004,11,0.857,0.801,0,-6.499,1,0.0618,0.332,1.2e-06,0.0789,0.753,96.009,latin -Black Eyed Peas,Let's Get It Started - Spike Mix,217733,False,2004,53,0.785,0.799,11,-2.208,0,0.126,0.117,0.0,0.292,0.797,104.923,"hip hop, pop" -O-Zone,Dragostea Din 
Tei,215431,False,2004,56,0.809,0.965,0,-3.947,1,0.0427,0.177,0.0,0.0577,0.672,130.103,pop -Terror Squad,Lean Back,247426,True,2004,67,0.783,0.916,1,-3.344,1,0.415,0.11,0.0,0.0746,0.695,95.321,hip hop -Anastacia,Sick and Tired,212266,False,1999,56,0.598,0.868,0,-4.552,1,0.0373,0.00908,0.0,0.279,0.47,99.848,pop -Gwen Stefani,What You Waiting For?,221226,False,2004,60,0.676,0.948,5,-2.557,1,0.0628,0.0509,8.19e-06,0.384,0.731,136.027,"pop, R&B" -Ashanti,Only U - No Intro,186306,False,2004,56,0.601,0.621,6,-5.576,0,0.0434,0.0374,1.75e-06,0.188,0.278,94.547,"hip hop, pop, R&B" -The Rasmus,In the Shadows,257920,False,2003,68,0.6,0.796,2,-4.481,1,0.0275,0.000109,5.2e-05,0.484,0.75,105.991,metal -Houston,I Like That,236520,False,2004,59,0.797,0.502,10,-3.925,0,0.0968,0.0154,0.0,0.1,0.385,106.997,"hip hop, pop" -Beyoncé,Naughty Girl,208600,False,2003,63,0.735,0.466,6,-8.65,0,0.0857,0.239,6.24e-06,0.102,0.643,99.973,"pop, R&B" -Linkin Park,Numb,185586,False,2003,81,0.496,0.863,9,-4.153,1,0.0381,0.0046,0.0,0.639,0.243,110.018,"rock, metal" -Lloyd Banks,On Fire,187280,True,2004,61,0.759,0.825,8,-4.289,1,0.22,0.0923,0.00057,0.24,0.429,94.857,"hip hop, pop" -Akon,Locked Up,235066,True,2004,16,0.818,0.579,7,-4.475,1,0.101,0.023,0.0,0.107,0.354,89.987,pop -D12,My Band,298773,True,2004,73,0.851,0.849,1,-3.383,0,0.0828,0.497,2.05e-06,0.116,0.844,120.014,"hip hop, pop, rock" -Jay Sean,Eyes On You - Radio Mix,190493,False,2004,42,0.802,0.672,7,-4.971,1,0.0847,0.0775,0.0,0.136,0.619,96.525,"hip hop, pop, R&B" -Mario Winans,I Don't Wanna Know (feat. Enya & P. Diddy) - 2016 Remaster,257333,False,2016,66,0.833,0.515,11,-5.0,0,0.0462,0.347,0.00156,0.116,0.4,97.007,pop -Destiny's Child,Lose My Breath,242013,False,2004,61,0.814,0.899,1,-5.958,1,0.0637,0.00727,0.219,0.0979,0.545,119.011,"pop, R&B" -Eric Prydz,Call on Me - Radio Mix,171360,False,2004,72,0.597,0.837,10,-6.518,1,0.375,0.00427,0.00115,0.839,0.447,126.342,"pop, Dance/Electronic" -Ciara,Goodies (feat. 
Petey Pablo),223000,False,2004,67,0.826,0.647,5,-6.306,0,0.0491,0.0277,1.52e-06,0.298,0.872,102.017,"pop, R&B" -Kelis,Trick Me,206106,True,2003,63,0.97,0.72,1,-3.347,0,0.149,0.0369,0.000389,0.326,0.962,107.17,"pop, R&B, Dance/Electronic" -LL Cool J,Headsprung,267320,False,2004,56,0.718,0.786,0,-5.76,1,0.0284,0.0114,2.5e-05,0.0745,0.465,99.916,"hip hop, pop, R&B" -Spiderbait,Black Betty - Edit,205973,False,2004,63,0.562,0.865,7,-6.476,0,0.0922,2.06e-05,0.591,0.219,0.35,124.047,rock -George Michael,Amazing,265826,False,2004,57,0.805,0.754,10,-6.825,0,0.0394,0.0884,1.77e-06,0.117,0.88,128.429,set() -Britney Spears,My Prerogative,213893,False,2004,53,0.749,0.938,10,-4.423,0,0.118,0.0127,1.96e-06,0.103,0.619,111.014,pop -Nina Sky,Move Ya Body,232000,False,2004,67,0.87,0.712,11,-6.313,0,0.0432,0.0202,0.000331,0.0576,0.877,121.057,"pop, R&B" -Baby Bash,Suga Suga,239026,False,2003,73,0.662,0.748,5,-3.041,0,0.268,0.688,8.43e-06,0.0841,0.535,82.331,"hip hop, pop, R&B, latin" -Petey Pablo,Freek-A-Leek,235186,True,2003,56,0.737,0.697,2,-3.716,1,0.254,0.0756,0.0,0.359,0.662,104.917,"hip hop, pop" -JAY-Z,Dirt Off Your Shoulder,245173,True,2003,53,0.779,0.655,1,-7.147,1,0.318,0.0319,2.63e-06,0.339,0.645,163.971,hip hop -Juanes,La Camisa Negra,216706,False,2004,70,0.751,0.731,6,-4.419,0,0.0308,0.0838,0.0,0.0556,0.973,97.007,"pop, latin" -Avril Lavigne,Nobody's Home,212413,False,2004,61,0.348,0.907,5,-3.66,0,0.0497,0.000516,0.0,0.161,0.177,185.406,pop -Robbie Williams,Radio,233600,False,2004,33,0.607,0.874,5,-5.409,0,0.0349,0.00331,0.0494,0.26,0.846,130.015,"pop, rock" -Black Eyed Peas,Hey Mama,214893,True,2003,49,0.86,0.866,4,-6.345,0,0.232,0.104,1.42e-06,0.584,0.933,100.15,"hip hop, pop" -Ying Yang Twins,Salt Shaker,252440,True,2003,61,0.884,0.697,11,-6.022,1,0.128,0.0509,0.0,0.094,0.322,101.965,"hip hop, pop" -Enrique Iglesias,Not In Love - Radio Mix,223133,False,2003,37,0.762,0.885,0,-5.096,0,0.0395,0.0371,0.0,0.0519,0.886,117.021,"pop, latin" 
-Kelis,Milkshake,182626,False,2003,68,0.881,0.774,1,-6.068,1,0.0439,0.00986,0.0346,0.206,0.759,112.968,"pop, R&B, Dance/Electronic" -Linkin Park,Breaking the Habit,196906,False,2003,68,0.579,0.849,4,-5.218,0,0.0303,0.108,0.0,0.0909,0.581,100.021,"rock, metal" -JoJo,Leave (Get Out) - Radio Edit,242746,False,2005,49,0.656,0.513,5,-8.691,1,0.253,0.156,6.45e-05,0.0763,0.464,86.891,"hip hop, pop, R&B" -Christina Milian,Dip It Low,197186,False,2004,45,0.752,0.722,11,-4.207,0,0.104,0.0108,0.000536,0.0753,0.612,169.131,"hip hop, pop, R&B" -Avant,Read Your Mind,263506,False,2003,56,0.755,0.381,8,-6.696,0,0.0711,0.291,5.13e-06,0.0974,0.695,73.108,"pop, R&B" -Eminem,Just Lose It,248680,True,2004,67,0.94,0.633,8,-3.56,1,0.0467,0.0581,4.04e-05,0.281,0.962,121.003,hip hop -Sugababes,Too Lost In You,237693,False,2003,58,0.586,0.705,1,-4.904,0,0.0287,0.0485,0.0,0.15,0.459,98.014,"pop, R&B" -Snoop Dogg,Drop It Like It's Hot,266066,True,2004,39,0.892,0.628,1,-3.832,1,0.216,0.169,0.0,0.102,0.676,92.063,"hip hop, pop" -Sean Paul,I'm Still in Love with You (feat. 
Sasha),273360,False,2002,68,0.765,0.666,7,-5.384,1,0.172,0.102,0.0,0.116,0.756,87.002,"hip hop, pop" -The Shapeshifters,Lola's Theme - Radio Edit,207066,False,2004,65,0.748,0.845,5,-4.612,0,0.0536,0.00083,0.000225,0.068,0.659,123.925,Dance/Electronic -D12,How Come,249533,True,2004,57,0.745,0.858,9,-2.221,1,0.237,0.28,0.0,0.188,0.547,89.983,"hip hop, pop, rock" -Basement Jaxx,Good Luck,282306,False,2003,0,0.571,0.968,5,-3.092,1,0.162,0.0649,3.07e-06,0.592,0.313,154.07,"pop, Dance/Electronic" -Kevin Lyttle,Turn Me On,192106,False,2004,67,0.677,0.682,9,-6.879,1,0.0361,0.0405,0.0,0.0351,0.875,106.279,hip hop -Chingy,One Call Away,276800,False,2003,61,0.765,0.821,4,-5.926,0,0.183,0.0918,0.0,0.217,0.962,162.519,"hip hop, pop, R&B" -T.I.,Bring Em Out,216706,True,2004,63,0.759,0.891,11,-2.983,1,0.257,0.0298,0.0,0.141,0.587,98.579,"hip hop, pop" -DJ Casper,Cha Cha Slide - Hardino Mix,222146,False,2004,51,0.853,0.911,11,-6.722,0,0.125,0.0436,0.00571,0.287,0.802,131.012,set() -Kylie Minogue,I Believe in You,200973,False,2004,47,0.548,0.785,7,-5.087,1,0.0704,0.00277,0.048,0.329,0.418,120.939,"pop, Dance/Electronic" -Twista,Overnight Celebrity,233360,True,2004,66,0.828,0.792,10,-5.435,0,0.179,0.182,0.0,0.281,0.777,134.199,"hip hop, pop, R&B" -N.E.R.D,She Wants To Move,213786,False,2004,57,0.766,0.851,5,-4.831,1,0.0786,0.00402,0.000744,0.256,0.8,115.012,hip hop -Britney Spears,Everytime,230306,False,2003,63,0.398,0.284,3,-12.852,1,0.0337,0.966,8.57e-05,0.116,0.114,109.599,pop -U2,Vertigo,193520,False,2004,64,0.416,0.819,9,-3.974,1,0.0613,0.000138,0.00108,0.147,0.632,140.083,"World/Traditional, rock" -Franz Ferdinand,Take Me Out,237026,False,2004,77,0.277,0.663,4,-8.821,0,0.0377,0.000409,0.00051,0.136,0.527,104.561,rock -J-Kwon,Tipsy - Club Mix,247106,True,2001,59,0.925,0.741,9,-5.827,0,0.315,0.0653,0.0,0.044,0.654,93.04,"hip hop, pop" -G-Unit,Wanna Get To Know You,265026,True,2003,61,0.513,0.826,10,-3.651,0,0.132,0.106,0.0,0.0675,0.889,76.91,"hip hop, pop" -Lil' Flip,Sunshine 
(feat. Lea),225173,True,2004,42,0.814,0.387,0,-9.867,1,0.0946,0.0248,0.0,0.131,0.792,93.961,"hip hop, pop" -Kanye West,Jesus Walks,193733,True,2004,73,0.637,0.834,3,-4.686,0,0.323,0.614,0.0,0.317,0.715,87.312,hip hop -Evanescence,My Immortal,262533,False,2003,72,0.19,0.265,9,-9.206,1,0.0356,0.863,0.0,0.134,0.101,79.012,metal -Nelly,My Place,336506,True,2004,57,0.6,0.641,1,-4.237,0,0.0423,0.0498,1.49e-06,0.0982,0.642,163.153,"hip hop, pop, R&B" -Lemar,If There's Any Justice,229080,False,2004,46,0.706,0.665,6,-5.348,0,0.0453,0.0128,0.0,0.338,0.639,89.993,R&B -No Doubt,It's My Life,226053,False,2003,60,0.612,0.735,8,-5.074,0,0.0282,0.00202,0.00118,0.328,0.783,126.326,"rock, pop" -Avril Lavigne,Don't Tell Me,202013,False,2004,58,0.523,0.795,4,-2.92,1,0.0386,0.00462,0.0,0.358,0.484,144.106,pop -Twista,Slow Jamz,316053,True,2004,72,0.559,0.733,2,-5.659,0,0.171,0.314,0.0,0.284,0.611,145.115,"hip hop, pop, R&B" -Green Day,American Idiot,176346,True,2004,77,0.38,0.988,1,-2.042,1,0.0639,2.64e-05,7.86e-05,0.368,0.769,186.113,rock -Kelis,Millionaire,224933,False,2003,57,0.68,0.847,11,-6.636,1,0.108,0.0217,0.0,0.0374,0.75,176.051,"pop, R&B, Dance/Electronic" -Scissor Sisters,Comfortably Numb,266040,False,2004,43,0.803,0.924,7,-6.466,1,0.0391,0.237,0.0167,0.104,0.788,116.972,"pop, rock, Dance/Electronic" -Alicia Keys,Diary (feat. Tony! Toni! Tone! 
& Jermaine Paul),284160,False,2003,55,0.663,0.465,8,-9.711,0,0.0317,0.555,0.00183,0.125,0.279,127.932,"pop, R&B" -Kanye West,Through The Wire,221226,True,2004,76,0.571,0.739,7,-6.11,1,0.247,0.00865,5.21e-06,0.158,0.66,83.089,hip hop -Special D.,Come With Me - Radio Edit,185133,False,2004,61,0.739,0.999,7,-5.077,1,0.0803,0.13,0.00224,0.28,0.501,139.982,pop -Ludacris,Splash Waterfalls,290760,True,2003,48,0.734,0.846,11,-6.102,1,0.409,0.101,0.0,0.233,0.702,145.894,"hip hop, pop" -Avril Lavigne,My Happy Ending,242413,True,2004,68,0.414,0.936,2,-2.407,1,0.0758,0.00136,0.0,0.369,0.74,170.229,pop -Dido,White Flag,240040,False,2003,44,0.512,0.525,5,-6.823,1,0.0401,0.327,3.3e-06,0.081,0.294,169.951,pop -Eamon,Fuck It (I Don't Want You Back),225106,True,2004,64,0.828,0.653,0,-6.245,0,0.0653,0.214,3.25e-06,0.0404,0.575,68.507,set() -Beyoncé,"Me, Myself and I",301133,False,2003,62,0.75,0.458,1,-9.092,1,0.0803,0.226,3.25e-05,0.137,0.536,83.61,"pop, R&B" -3 Doors Down,Here Without You,238733,False,2002,74,0.557,0.533,10,-6.817,0,0.0252,0.0492,0.0,0.205,0.233,143.994,"pop, rock, metal" -Kanye West,All Falls Down,223506,True,2004,80,0.657,0.734,8,-4.832,0,0.484,0.149,0.0,0.139,0.434,91.03,hip hop -Twista,Sunshine (feat. Anthony Hamilton),226013,True,2004,56,0.926,0.888,11,-5.337,1,0.294,0.261,0.0,0.348,0.819,98.523,"hip hop, pop, R&B" -Katie Melua,The Closest Thing to Crazy,252466,False,2003,55,0.562,0.219,4,-13.2,1,0.0312,0.856,0.000296,0.0979,0.106,127.831,"pop, easy listening, jazz" -Usher,My Boo,223440,False,2004,76,0.662,0.507,5,-8.238,1,0.118,0.257,0.0,0.0465,0.676,86.412,"hip hop, pop, R&B" -Lloyd,Southside,277840,False,2004,59,0.688,0.404,9,-9.481,0,0.0818,0.377,0.0,0.0813,0.448,75.007,"hip hop, pop, R&B" -LMC,Take Me To The Clouds Above - LMC Vs. 
U2 / Radio Edit,171546,False,2004,59,0.668,0.905,6,-5.395,1,0.038,0.00232,0.0331,0.141,0.667,128.658,Dance/Electronic -The Streets,Fit but You Know It,254266,True,2004,58,0.68,0.844,6,-1.729,1,0.299,0.23,0.0,0.0305,0.895,172.302,"hip hop, Dance/Electronic" -JUVENILE,Slow Motion,248200,True,2003,64,0.713,0.734,0,-4.894,0,0.325,0.548,0.0,0.0687,0.89,172.872,"hip hop, pop" -Outkast,The Way You Move (feat. Sleepy Brown),234000,True,2003,66,0.871,0.597,5,-4.932,0,0.0464,0.126,0.000113,0.0638,0.635,125.999,"hip hop, pop" -Girls Aloud,Love Machine,205360,False,2004,59,0.663,0.95,7,-4.21,1,0.0621,0.00127,0.00492,0.0753,0.697,116.02,"pop, Dance/Electronic" -Jessica Simpson,With You,191826,False,2003,57,0.553,0.756,0,-4.583,1,0.108,0.0923,0.0,0.215,0.605,94.487,"pop, R&B" -Eminem,Like Toy Soldiers,296880,True,2004,48,0.52,0.768,8,-3.489,0,0.359,0.0193,0.00034,0.104,0.398,79.178,hip hop -Gary Jules,Mad World (Feat. Michael Andrews),189506,False,2001,65,0.345,0.0581,3,-17.217,1,0.0374,0.976,0.000366,0.103,0.304,174.117,pop -Ashlee Simpson,Pieces Of Me,217440,False,2004,56,0.505,0.799,2,-4.286,1,0.0773,0.0572,0.0,0.11,0.772,174.001,pop -Will Young,Leave Right Now,214733,False,2003,55,0.641,0.445,6,-8.674,1,0.0368,0.145,0.0,0.108,0.383,81.931,pop -Alicia Keys,You Don't Know My Name,366733,False,2003,61,0.264,0.663,9,-8.264,1,0.187,0.59,0.00443,0.342,0.249,167.078,"pop, R&B" -Mariah Carey,We Belong Together,201400,False,2005,69,0.84,0.476,0,-7.918,1,0.0629,0.0264,0.0,0.0865,0.767,139.987,"pop, R&B" -Mario,Let Me Love You,256733,False,2004,72,0.656,0.578,7,-8.97,0,0.0922,0.235,0.0,0.118,0.556,94.514,"pop, R&B" -Kanye West,Gold Digger,207626,True,2005,78,0.629,0.696,1,-5.572,0,0.348,0.0195,0.0,0.0554,0.623,93.034,hip hop -Rihanna,Pon de Replay,246960,False,2005,77,0.779,0.64,7,-8.415,1,0.159,0.000155,0.00077,0.101,0.498,99.019,"hip hop, pop, R&B" -50 Cent,Candy Shop,209106,True,2005,79,0.614,0.574,11,-7.961,1,0.466,0.0253,3.2e-05,0.38,0.755,125.173,"hip hop, pop" -James 
Blunt,You're Beautiful,209493,True,2005,75,0.675,0.479,0,-9.87,0,0.0278,0.633,1.76e-05,0.088,0.454,81.998,pop -Green Day,Boulevard of Broken Dreams,261266,True,2004,72,0.49,0.679,8,-3.68,1,0.0309,0.00394,1.54e-05,0.0383,0.506,167.046,rock -The Pussycat Dolls,Don't Cha,272080,False,2005,72,0.875,0.631,1,-3.475,1,0.099,0.00542,2.54e-06,0.127,0.549,120.003,"pop, R&B" -Daniel Powter,Bad Day,233640,False,2005,74,0.599,0.785,3,-4.013,1,0.0309,0.448,0.00336,0.151,0.52,140.046,pop -Akon,Bananza (Belly Dancer),238493,False,2004,28,0.878,0.699,10,-5.897,0,0.132,0.0345,3.37e-06,0.755,0.666,104.838,pop -Madonna,Hung Up,337733,False,2005,74,0.649,0.647,9,-7.695,0,0.0452,0.0039,0.161,0.0686,0.405,125.02,pop -50 Cent,Just A Lil Bit,237706,True,2005,76,0.489,0.692,1,-6.672,1,0.41,0.0322,0.00608,0.315,0.527,96.946,"hip hop, pop" -Crazy Frog,Axel F,168879,False,2005,67,0.86,0.907,2,-3.321,1,0.0407,0.279,0.693,0.0648,0.786,138.045,pop -Eminem,Ass Like That,265480,True,2004,52,0.646,0.796,2,-6.152,1,0.366,0.409,0.0,0.107,0.676,83.093,hip hop -Sean Paul,We Be Burnin',213066,False,2005,59,0.95,0.803,1,-4.195,1,0.151,0.145,0.0,0.0748,0.86,117.0,"hip hop, pop" -Gwen Stefani,Rich Girl,236213,False,2004,68,0.856,0.754,7,-2.728,0,0.0416,0.0294,1.49e-05,0.206,0.722,98.018,"pop, R&B" -The Chemical Brothers,Galvanize,393813,False,2005,63,0.745,0.714,7,-3.681,1,0.0751,0.0141,0.0222,0.363,0.365,104.003,Dance/Electronic -Fort Minor,Remember the Name (feat. Styles of Beyond),230493,True,2005,73,0.688,0.835,8,-4.162,1,0.0911,0.0583,2.87e-06,0.0795,0.88,84.858,"hip hop, pop, rock" -50 Cent,Disco Inferno,214226,True,2005,72,0.925,0.659,3,-4.763,1,0.231,0.206,7.88e-05,0.279,0.71,97.018,"hip hop, pop" -The Game,How We Do,235533,True,2005,74,0.862,0.648,4,-7.401,0,0.251,0.0455,0.0,0.0332,0.637,98.012,"hip hop, pop" -Chris Brown,Run It! (feat. 
Juelz Santana),229866,False,2005,71,0.846,0.482,1,-6.721,0,0.129,0.0246,0.0,0.393,0.212,100.969,"hip hop, pop, R&B" -Mariah Carey,It's Like That,203360,False,2005,0,0.8,0.633,8,-4.875,0,0.0514,0.0901,0.0,0.0315,0.836,95.953,"pop, R&B" -Black Eyed Peas,My Humps,326960,False,2005,67,0.802,0.682,1,-5.924,0,0.222,0.111,1.39e-05,0.109,0.586,123.95,"hip hop, pop" -Ciara,Oh (feat. Ludacris),256346,False,2004,61,0.8,0.496,7,-7.135,1,0.0506,0.000298,0.0,0.0697,0.357,128.29,"pop, R&B" -50 Cent,Outta Control - Remix,247506,True,2005,60,0.772,0.599,8,-5.996,0,0.246,0.0277,0.0,0.0839,0.557,92.029,"hip hop, pop" -Baby Bash,"Baby, I'm Back",219920,True,2005,4,0.899,0.365,9,-5.461,0,0.105,0.0508,0.0,0.097,0.749,100.01,"hip hop, pop, R&B, latin" -Jennifer Lopez,Get Right,225533,False,2005,65,0.741,0.759,0,-5.096,0,0.12,0.0218,0.0,0.628,0.362,97.084,"hip hop, pop, R&B" -JAY-Z,Numb / Encore,205733,True,2004,76,0.687,0.793,2,-4.254,1,0.166,0.0603,0.0,0.582,0.751,107.045,hip hop -The Game,Hate It Or Love It,206400,True,2005,77,0.802,0.785,9,-4.781,1,0.207,0.14,0.0,0.123,0.435,99.998,"hip hop, pop" -Gorillaz,Feel Good Inc.,222640,False,2005,82,0.818,0.705,6,-6.679,1,0.177,0.00836,0.00233,0.613,0.772,138.559,hip hop -Black Eyed Peas,Don't Phunk With My Heart,239773,False,2005,62,0.69,0.928,5,-2.76,0,0.061,0.00937,0.0,0.547,0.604,130.889,"hip hop, pop" -Akon,Lonely,235800,True,2004,21,0.629,0.532,5,-7.88,0,0.0352,0.331,0.0,0.238,0.619,90.098,pop -Bodyrockers,I Like The Way - Radio Edit,200053,False,2005,54,0.642,0.851,6,-3.638,1,0.0431,0.0248,0.0146,0.084,0.777,127.988,"rock, Dance/Electronic" -Sugababes,Push The Button,218093,False,2005,64,0.962,0.66,8,-5.096,1,0.061,0.0485,0.0,0.076,0.814,126.016,"pop, R&B" -Jeezy,Soul Survivor,280013,True,2005,55,0.684,0.493,10,-7.728,1,0.0459,0.000839,0.0,0.157,0.08,84.03,"hip hop, pop" -Eminem,Mockingbird,250760,True,2004,77,0.637,0.678,0,-3.798,1,0.266,0.209,0.0,0.156,0.254,84.039,hip hop -Shakira,La Tortura (feat. 
Alejandro Sanz),212893,False,2005,72,0.74,0.783,0,-5.367,1,0.0427,0.0297,0.000308,0.123,0.812,100.011,"pop, latin" -Gorillaz,DARE,244999,False,2005,73,0.76,0.891,11,-5.852,0,0.0372,0.0229,0.0869,0.298,0.966,120.264,hip hop -Snoop Dogg,Signs,236813,True,2004,54,0.94,0.713,7,-5.308,1,0.127,0.0319,0.00076,0.325,0.666,112.955,"hip hop, pop" -Robbie Williams,Tripping,276603,False,2005,58,0.666,0.922,2,-4.458,0,0.0429,0.0381,0.000191,0.0633,0.828,118.014,"pop, rock" -David Banner,Play,230133,True,2005,52,0.837,0.658,1,-8.013,0,0.126,0.000213,0.122,0.243,0.586,95.027,"hip hop, pop" -Trillville,Some Cut,283454,True,2004,50,0.9,0.515,1,-6.626,1,0.372,0.00341,0.0,0.111,0.659,84.995,"hip hop, pop" -D4L,Laffy Taffy,224253,True,2005,63,0.891,0.439,7,-7.994,0,0.428,0.0351,0.0,0.0932,0.622,77.499,"hip hop, pop" -Missy Elliott,Lose Control (feat. Ciara & Fat Man Scoop),226863,True,2005,67,0.904,0.813,4,-7.105,0,0.121,0.0311,0.00697,0.0471,0.81,125.461,"hip hop, pop, R&B" -Rob Thomas,Lonely No More,226640,False,2005,56,0.551,0.896,9,-3.152,0,0.109,0.033,0.0,0.0899,0.858,171.79,"pop, Folk/Acoustic" -Brian McFadden,Almost Here,229826,False,2004,42,0.561,0.452,9,-7.324,0,0.0336,0.409,0.0,0.106,0.316,77.984,set() -Bobby V.,"Slow Down - 12"" Version",258666,False,2005,65,0.568,0.732,11,-4.896,0,0.103,0.179,4.92e-06,0.401,0.626,186.048,"hip hop, pop, R&B" -Backstreet Boys,Incomplete,239586,False,2005,63,0.437,0.589,4,-4.834,1,0.0331,0.231,0.0,0.0768,0.165,133.631,pop -Pretty Ricky,Grind With Me,237733,True,2005,63,0.7,0.538,1,-5.952,1,0.045,0.000204,0.0,0.105,0.189,136.518,"hip hop, pop, R&B" -Kanye West,Diamonds From Sierra Leone - Remix,233400,True,2005,61,0.672,0.814,7,-3.768,1,0.27,0.258,0.0,0.164,0.379,97.043,hip hop -Snoop Dogg,Drop It Like It's Hot,266066,True,2004,39,0.892,0.628,1,-3.832,1,0.216,0.169,0.0,0.102,0.676,92.063,"hip hop, pop" -Fat Joe,Get It Poppin' (feat. 
Nelly) - Radio Version,211320,False,2005,41,0.905,0.55,1,-7.558,1,0.372,0.0628,0.0,0.0644,0.52,100.813,"hip hop, pop" -Destiny's Child,Soldier (feat. T.I. & Lil' Wayne),325573,False,2004,63,0.878,0.417,7,-6.799,1,0.361,0.0444,0.0,0.0833,0.904,77.49,"pop, R&B" -Ciara,"1, 2 Step (feat. Missy Elliott) - Main",202213,False,2005,52,0.939,0.498,5,-10.94,0,0.161,0.0444,0.00128,0.0475,0.801,113.053,"pop, R&B" -Amerie,1 Thing,238746,False,2008,52,0.636,0.946,10,-4.683,0,0.332,0.115,3.75e-05,0.0416,0.891,125.085,"hip hop, pop, R&B" -Ludacris,Get Back,270746,True,2004,59,0.555,0.639,1,-5.16,1,0.263,0.0594,0.0,0.118,0.184,171.609,"hip hop, pop" -Franz Ferdinand,Do You Want To,215000,False,2005,61,0.412,0.944,0,-2.896,1,0.0448,0.0724,0.000493,0.801,0.79,123.091,rock -Natalie Imbruglia,Shiver,222653,False,2005,52,0.489,0.798,1,-4.49,1,0.0316,0.00115,2.81e-06,0.149,0.309,94.163,"pop, Folk/Acoustic" -Gwen Stefani,Hollaback Girl,199853,True,2004,69,0.926,0.916,10,-2.221,0,0.0929,0.35,6.17e-06,0.0234,0.904,110.007,"pop, R&B" -Pretty Ricky,Your Body,240040,True,2005,59,0.838,0.594,4,-5.695,0,0.113,0.176,0.0,0.0867,0.507,90.038,"hip hop, pop, R&B" -Thirty Seconds To Mars,The Kill,231533,False,2005,74,0.179,0.912,4,-3.881,0,0.0791,0.0014,0.000294,0.582,0.289,182.99,rock -Alicia Keys,Karma,256000,False,2003,50,0.727,0.736,10,-6.203,1,0.0615,0.0743,0.00311,0.0348,0.687,85.098,"pop, R&B" -2Pac,Ghetto Gospel,238053,True,2004,68,0.794,0.614,5,-5.352,0,0.0467,0.0964,0.0,0.0788,0.663,80.569,hip hop -Craig David,Don't Love You No More (I'm Sorry),243986,False,2005,56,0.636,0.558,7,-7.076,1,0.0245,0.462,0.0,0.062,0.572,100.113,"hip hop, pop, R&B" -Katie Melua,Nine Million Bicycles,197160,False,2005,60,0.534,0.247,0,-15.636,1,0.0322,0.516,0.00859,0.122,0.344,82.168,"pop, easy listening, jazz" -Lil Jon & The East Side Boyz,Lovers And Friends,260600,True,2004,66,0.675,0.501,10,-6.183,1,0.22,0.0541,0.0,0.429,0.532,139.861,"hip hop, pop" -Mariah Carey,Don't Forget About Us - Radio 
Edit,233866,False,2005,0,0.7,0.529,10,-6.816,0,0.0395,0.0422,0.0,0.101,0.386,143.555,"pop, R&B" -Ying Yang Twins,Wait (The Whisper Song),179160,True,2005,57,0.933,0.513,7,-13.203,1,0.347,0.00112,0.00248,0.107,0.595,102.017,"hip hop, pop" -Will Smith,Switch,197666,False,2020,34,0.873,0.9,5,-4.325,0,0.18,0.0107,0.000502,0.556,0.478,102.516,"hip hop, pop" -Bow Wow,Like You (feat. Ciara),205840,False,2005,66,0.714,0.6,6,-8.365,1,0.0876,0.0465,0.0,0.0646,0.504,82.017,"hip hop, pop, R&B" -Kaiser Chiefs,Everyday I Love You Less And Less,217706,False,2005,58,0.49,0.956,1,-4.556,0,0.0407,0.00466,6.15e-06,0.242,0.577,160.028,rock -Kelly Clarkson,Behind These Hazel Eyes,198973,False,2004,59,0.548,0.889,9,-4.682,1,0.0382,0.00165,0.00109,0.197,0.425,90.048,"pop, R&B" -Black Eyed Peas,Don't Lie,219000,False,2005,61,0.662,0.785,9,-5.299,1,0.186,0.16,0.0,0.0784,0.604,89.885,"hip hop, pop" -Westlife,You Raise Me Up,241066,False,2005,1,0.129,0.358,10,-6.596,1,0.0314,0.631,0.0,0.0886,0.172,75.048,pop -Ray J,One Wish,337640,False,2005,62,0.525,0.652,5,-7.042,0,0.28,0.278,0.0,0.339,0.602,127.673,"hip hop, pop, R&B" -Papa Roach,Scars,208199,False,2004,60,0.455,0.929,11,-3.295,1,0.0494,0.0875,0.0,0.226,0.484,89.782,"rock, metal" -Arctic Monkeys,I Bet You Look Good On The Dancefloor,173680,False,2006,71,0.535,0.948,6,-4.19,0,0.0356,0.00225,0.0,0.376,0.778,103.183,rock -Shayne Ward,That's My Goal,219600,False,2006,1,0.286,0.508,2,-5.966,1,0.0336,0.372,0.0,0.27,0.14,68.637,pop -Bow Wow,Let Me Hold You (feat. 
Omarion),248493,False,2005,62,0.815,0.662,6,-8.244,1,0.199,0.0915,0.000409,0.0898,0.611,151.518,"hip hop, pop, R&B" -Kaiser Chiefs,I Predict A Riot,233186,False,2005,63,0.395,0.979,8,-3.241,1,0.0766,0.0243,0.0,0.12,0.41,158.842,rock -Usher,Caught Up,224640,False,2004,61,0.817,0.806,6,-4.606,1,0.0623,0.027,9e-06,0.119,0.805,110.086,"hip hop, pop, R&B" -Kelly Clarkson,Because of You,219493,False,2004,71,0.587,0.583,5,-5.284,0,0.0313,0.248,0.0,0.124,0.15,139.92,"pop, R&B" -3 Doors Down,Let Me Go,243053,False,2012,52,0.478,0.863,1,-4.914,0,0.0387,0.0181,0.0,0.111,0.491,92.004,"pop, rock, metal" -Nickelback,Photograph,258920,False,2005,67,0.515,0.876,8,-3.756,1,0.0292,0.000932,0.000166,0.136,0.385,145.916,"rock, metal" -T-Pain,I'm Sprung,231040,False,2005,0,0.722,0.329,0,-11.617,0,0.108,0.088,0.0,0.081,0.166,99.991,"hip hop, pop, R&B" -Destiny's Child,Cater 2 U,245400,False,2004,58,0.605,0.584,11,-7.043,0,0.197,0.286,0.0,0.235,0.464,125.802,"pop, R&B" -The Pussycat Dolls,Stickwitu,207506,False,2005,67,0.548,0.554,9,-6.408,1,0.0587,0.283,0.0,0.0708,0.382,79.918,"pop, R&B" -Ryan Cabrera,True,204173,False,2004,62,0.598,0.4,6,-10.054,1,0.0241,0.459,0.0,0.151,0.369,96.938,pop -Stereophonics,Dakota,297426,False,2005,69,0.506,0.93,4,-3.499,1,0.0675,0.129,0.00682,0.0931,0.305,146.994,"rock, pop" -Eminem,Like Toy Soldiers,296880,True,2004,67,0.52,0.768,8,-3.489,0,0.359,0.0193,0.00034,0.104,0.398,79.178,hip hop -Nelly,Over And Over,253933,True,2004,61,0.644,0.517,7,-10.02,1,0.0803,0.0065,1.37e-06,0.158,0.464,169.799,"hip hop, pop, R&B" -Coldplay,Speed of Sound,287906,False,2005,69,0.514,0.898,11,-6.765,0,0.0577,0.00488,0.0345,0.0746,0.353,123.067,"rock, pop" -KT Tunstall,Suddenly I See,201706,False,2005,71,0.587,0.767,0,-5.713,1,0.0449,0.225,0.0,0.112,0.664,100.38,"World/Traditional, pop, Folk/Acoustic" -Destiny's Child,Girl,224146,False,2004,55,0.567,0.747,6,-6.019,0,0.105,0.31,0.0,0.04,0.556,89.036,"pop, R&B" -Kelly Clarkson,Since U Been 
Gone,188960,False,2004,71,0.662,0.739,9,-5.354,0,0.0322,0.00206,0.0603,0.113,0.382,130.999,"pop, R&B" -U2,Sometimes You Can't Make It On Your Own,305080,False,2004,56,0.53,0.597,2,-5.652,1,0.0247,0.00275,0.473,0.105,0.346,96.0,"World/Traditional, rock" -The All-American Rejects,Dirty Little Secret,193653,False,2005,65,0.469,0.955,10,-4.253,1,0.0432,0.000343,1.35e-06,0.548,0.462,143.853,"rock, pop" -Toby Keith,As Good As I Once Was,227840,False,2005,67,0.68,0.87,5,-4.461,1,0.0393,0.266,0.0,0.109,0.646,119.075,country -Gwen Stefani,Cool,189333,False,2004,59,0.709,0.721,2,-3.52,1,0.0281,0.0322,0.000309,0.355,0.801,112.019,"pop, R&B" -Foo Fighters,Best of You,255626,False,2005,76,0.366,0.94,1,-5.119,0,0.0696,0.000769,9.42e-05,0.188,0.369,130.198,"rock, metal" -Shakira,Hips Don't Lie (feat. Wyclef Jean),218093,False,2005,82,0.778,0.824,10,-5.892,0,0.0707,0.284,0.0,0.405,0.758,100.024,"pop, latin" -Sean Paul,Temperature,218573,False,2005,78,0.951,0.6,0,-4.675,0,0.0685,0.106,0.0,0.0712,0.822,125.04,"hip hop, pop" -Justin Timberlake,SexyBack (feat. 
Timbaland),242733,True,2006,78,0.967,0.583,7,-5.562,0,0.0789,0.0584,0.0,0.0519,0.964,117.0,pop -Beyoncé,Irreplaceable,227853,False,2007,70,0.447,0.694,7,-4.637,0,0.382,0.0293,5.46e-06,0.167,0.509,175.868,"pop, R&B" -Amy Winehouse,Rehab,214946,False,2006,71,0.407,0.558,0,-13.609,1,0.0552,0.0541,1.51e-06,0.34,0.777,71.815,R&B -Rihanna,SOS,238920,False,2006,73,0.677,0.671,7,-4.905,1,0.0361,0.004,5.07e-05,0.413,0.527,137.046,"hip hop, pop, R&B" -Gnarls Barkley,Crazy,177466,False,2006,74,0.855,0.587,8,-4.589,1,0.0336,0.0505,0.00714,0.105,0.64,111.97,"hip hop, R&B" -Busta Rhymes,Touch It,214960,True,2006,61,0.658,0.551,1,-6.62,1,0.37,0.0043,0.000108,0.197,0.442,138.335,"hip hop, pop" -Nelly Furtado,Maneater,258893,False,2006,79,0.796,0.777,6,-4.81,1,0.0397,0.0261,0.000358,0.121,0.787,132.722,"hip hop, pop, latin" -Bob Sinclar,Rock This Party - Everybody Dance Now,245466,False,2006,69,0.893,0.823,5,-3.708,0,0.122,0.0714,0.0,0.0509,0.902,128.009,"pop, Dance/Electronic" -Diddy,Come to Me (feat. Nicole Scherzinger),276786,True,2006,55,0.819,0.485,6,-6.45,0,0.0756,0.0176,0.0,0.658,0.386,96.998,"hip hop, pop" -Madonna,Sorry,281880,False,2005,61,0.589,0.877,0,-6.229,0,0.046,0.00121,0.013,0.169,0.443,132.971,pop -Chamillionaire,Ridin',303053,True,2005,71,0.787,0.799,8,-4.68,0,0.0993,0.189,0.0,0.3,0.835,143.052,"hip hop, pop" -Black Eyed Peas,Pump It,213066,False,2005,75,0.648,0.931,1,-3.15,0,0.181,0.00937,0.0,0.752,0.744,153.649,"hip hop, pop" -Nelly Furtado,Promiscuous,242293,False,2006,82,0.808,0.97,10,-6.098,0,0.0506,0.0569,6.13e-05,0.154,0.868,114.328,"hip hop, pop, latin" -Fedde Le Grand,Put Your Hands Up for Detroit - Radio Edit,150533,False,2015,39,0.814,0.923,7,-4.525,1,0.229,0.0154,0.154,0.0862,0.518,127.919,"pop, Dance/Electronic" -Sean Paul,(When You Gonna) Give It Up to Me (feat. 
Keyshia Cole) - Radio Version,243880,False,2006,58,0.711,0.761,8,-3.04,1,0.225,0.067,0.0,0.041,0.718,95.824,"hip hop, pop" -The Pussycat Dolls,Buttons,225560,False,2005,68,0.565,0.817,2,-4.338,1,0.27,0.152,0.0,0.247,0.477,210.851,"pop, R&B" -Ciara,Get Up (feat. Chamillionaire),261880,False,2006,59,0.964,0.595,1,-6.887,0,0.109,0.0248,5.14e-06,0.0405,0.629,128.593,"pop, R&B" -Eminem,Shake That,274440,True,2005,74,0.963,0.643,1,-5.785,0,0.117,0.0507,4.94e-05,0.157,0.534,107.005,hip hop -Cherish,Do It To It,226293,False,2006,65,0.826,0.67,2,-5.559,1,0.169,0.00798,0.0,0.495,0.794,74.007,"hip hop, pop, R&B" -Muse,Supermassive Black Hole,212439,False,2006,75,0.668,0.921,7,-3.727,1,0.0439,0.0492,0.00517,0.0877,0.782,120.0,rock -Chris Brown,Gimme That,186826,False,2005,63,0.678,0.572,11,-7.033,0,0.0466,0.00374,6.59e-05,0.118,0.444,87.165,"hip hop, pop, R&B" -Infernal,From Paris to Berlin,209666,False,2004,57,0.741,0.869,9,-6.534,0,0.0816,0.096,0.0,0.0396,0.839,126.069,pop -Cassie,Me & U,192213,False,2006,73,0.803,0.454,8,-4.802,0,0.0294,0.352,0.0,0.0655,0.739,99.99,"pop, R&B" -E-40,U and Dat (feat. T. Pain & Kandi Girl),202720,True,2006,63,0.858,0.484,6,-7.448,0,0.277,0.0241,1.8e-06,0.123,0.329,99.992,"hip hop, pop" -The Pussycat Dolls,Beep,229360,False,2005,57,0.938,0.735,7,-6.382,1,0.0434,0.00952,0.0,0.0998,0.55,103.7,"pop, R&B" -Lil Jon,Snap Yo Fingers,274386,False,2006,67,0.835,0.757,8,-5.029,0,0.23,0.00647,0.0,0.29,0.626,82.038,"hip hop, pop" -Armand Van Helden,My My My - Radio Edit,190000,False,2006,40,0.678,0.768,7,-8.502,1,0.137,0.0195,0.00866,0.0322,0.758,127.51,"pop, Dance/Electronic" -The Notorious B.I.G.,"Nasty Girl (feat. Diddy, Nelly, Jagged Edge & Avery Storm) - 2005 Remaster",286186,True,2005,68,0.833,0.628,2,-7.041,1,0.141,0.0949,1.6e-06,0.282,0.645,106.328,hip hop -Yung Joc,It's Goin' Down (feat. 
Nitti),241840,True,2006,66,0.888,0.577,1,-7.702,0,0.0612,0.0986,0.0,0.131,0.609,84.003,"hip hop, pop" -Shayne Ward,No Promises,223066,False,2006,1,0.5,0.498,4,-6.087,1,0.026,0.527,0.0,0.123,0.182,79.798,pop -Justin Timberlake,My Love (feat. T.I.),276160,False,2006,72,0.771,0.68,11,-5.881,0,0.224,0.277,1.18e-05,0.682,0.808,119.952,pop -Nickelback,Savin' Me,219320,False,2005,65,0.441,0.815,3,-4.088,1,0.0276,0.000731,0.0,0.414,0.522,164.007,"rock, metal" -Nelly,Grillz,271160,False,2005,60,0.867,0.504,4,-7.737,0,0.241,0.0358,0.0,0.307,0.84,82.996,"hip hop, pop, R&B" -Take That,Patience,202066,False,2006,64,0.309,0.783,10,-4.154,1,0.0405,0.142,1.11e-06,0.118,0.372,175.788,pop -Fergie,London Bridge,241306,True,2006,57,0.769,0.609,1,-5.894,1,0.357,0.216,0.0,0.166,0.633,90.951,"pop, R&B" -Cascada,Everytime We Touch - Radio Edit,199120,False,2011,0,0.64,0.977,8,-5.369,1,0.0555,0.00461,1.68e-05,0.371,0.493,142.019,"hip hop, pop, Dance/Electronic" -D4L,Laffy Taffy,224253,True,2005,63,0.891,0.439,7,-7.994,0,0.428,0.0351,0.0,0.0932,0.622,77.499,"hip hop, pop" -LL Cool J,Control Myself,233973,False,2006,42,0.825,0.876,10,-6.556,0,0.211,0.00928,1.2e-05,0.205,0.48,120.066,"hip hop, pop, R&B" -Beyoncé,Deja Vu (feat. Jay-Z),240280,False,2007,57,0.644,0.746,7,-4.941,1,0.341,0.0071,1.22e-05,0.0743,0.355,105.253,"pop, R&B" -P!nk,Stupid Girls,197173,True,2006,0,0.683,0.886,4,-5.045,0,0.071,0.000738,0.00188,0.0524,0.566,100.04,pop -Eminem,When I'm Gone,281320,True,2005,68,0.618,0.746,10,-5.476,0,0.352,0.0553,0.0,0.273,0.75,75.272,hip hop -Kelis,Bossy,273973,False,2006,50,0.906,0.633,4,-5.316,1,0.19,0.000182,8.34e-06,0.058,0.779,84.021,"pop, R&B, Dance/Electronic" -Bob Sinclar,Love Generation - Radio Edit,207613,False,2005,49,0.715,0.812,1,-5.758,1,0.062,0.175,0.000134,0.044,0.554,128.04,"pop, Dance/Electronic" -Meck,Thunder in My Heart Again (Radio Edit),189800,False,2005,46,0.679,0.922,2,-4.67,0,0.0294,0.000241,0.69,0.0697,0.806,129.016,set() -Mary J. 
Blige,Be Without You - Kendu Mix,246333,True,2005,69,0.726,0.7,2,-5.881,0,0.0998,0.0717,0.0,0.262,0.673,146.559,"pop, R&B" -Girls Aloud,Something Kinda Ooooh,201590,False,2012,49,0.681,0.908,11,-3.918,0,0.0473,0.0232,0.0,0.355,0.879,131.925,"pop, Dance/Electronic" -Rihanna,Unfaithful,226973,False,2006,70,0.588,0.391,0,-8.607,0,0.0334,0.839,0.0,0.227,0.349,144.069,"hip hop, pop, R&B" -Kanye West,Touch The Sky,236600,True,2005,73,0.552,0.846,9,-4.912,1,0.342,0.0114,0.0,0.309,0.554,98.56,hip hop -Dem Franchize Boyz,"Lean Wit It, Rock Wit It",229813,True,2006,60,0.886,0.62,5,-5.854,1,0.307,0.082,0.0,0.1,0.6,76.035,"hip hop, pop" -Evanescence,Call Me When You're Sober,214706,False,2006,65,0.45,0.883,7,-4.094,1,0.0524,0.00193,0.0,0.293,0.328,93.41,metal -Chingy,Pullin' Me Back,234133,True,2006,62,0.786,0.511,7,-6.66,1,0.286,0.0825,0.0,0.12,0.587,80.0,"hip hop, pop, R&B" -Nickelback,Far Away,238173,False,2005,70,0.518,0.797,6,-5.153,1,0.0309,0.000681,0.0,0.107,0.293,132.918,"rock, metal" -Fall Out Boy,"Dance, Dance",180266,False,2005,74,0.622,0.961,11,-3.198,0,0.154,0.00523,0.0,0.0854,0.449,114.452,rock -Leona Lewis,A Moment Like This,257293,False,2007,57,0.261,0.562,8,-3.667,1,0.0301,0.423,7.54e-06,0.131,0.167,70.543,"pop, R&B" -T.I.,What You Know,274333,True,2006,64,0.551,0.81,11,-4.365,1,0.0344,0.0155,0.0,0.169,0.332,73.464,"hip hop, pop" -Ludacris,Money Maker,230613,True,2006,62,0.551,0.598,1,-6.79,1,0.27,0.15,0.0,0.299,0.612,78.756,"hip hop, pop" -Nick Lachey,What's Left Of Me,244613,False,2006,37,0.42,0.742,7,-5.665,1,0.0375,0.142,0.0,0.145,0.294,149.965,pop -Jibbs,Chain Hang Low,207586,True,2006,56,0.792,0.589,4,-6.869,0,0.262,0.0259,0.000128,0.114,0.778,157.147,"hip hop, pop" -Arctic Monkeys,When The Sun Goes Down,202133,True,2006,72,0.348,0.875,11,-4.758,0,0.199,0.0341,0.0,0.117,0.407,169.152,rock -Rascal Flatts,Life is a Highway,276773,False,2006,0,0.561,0.936,5,-5.409,1,0.0613,0.00185,0.0,0.102,0.594,103.055,country -Beyoncé,Check On It (feat. 
Slim Thug),211186,False,2006,45,0.7,0.887,7,-3.887,1,0.218,0.0805,0.0,0.376,0.883,166.104,"pop, R&B" -Keyshia Cole,Love,255333,False,2005,72,0.688,0.519,5,-4.285,1,0.0283,0.064,0.0,0.1,0.318,116.714,"hip hop, pop, R&B" -Lily Allen,Smile,196893,True,2006,68,0.632,0.639,5,-5.938,1,0.0259,0.00143,0.0,0.221,0.733,95.506,pop -Ne-Yo,Sexy Love,220853,False,2006,67,0.693,0.516,8,-6.446,1,0.0413,0.297,0.000127,0.0604,0.494,94.02,"pop, R&B" -JoJo,Too Little Too Late,223680,False,2006,48,0.535,0.823,6,-5.338,0,0.0431,0.0113,3.88e-06,0.146,0.685,83.001,"hip hop, pop, R&B" -Mary J. Blige,One,260466,False,2005,64,0.344,0.874,0,-3.161,1,0.0777,0.0841,0.0,0.129,0.269,185.888,"pop, R&B" -Bubba Sparxxx,Ms. New Booty,252653,True,2005,62,0.64,0.62,1,-5.931,1,0.416,0.016,0.0,0.0831,0.609,129.37,"hip hop, pop, country" -Amy Winehouse,You Know I'm No Good,256946,False,2006,63,0.705,0.806,9,-3.607,1,0.0309,0.0132,0.00428,0.0701,0.734,103.383,R&B -Rascal Flatts,What Hurts The Most,214106,False,2006,69,0.537,0.674,5,-5.134,0,0.0277,0.0088,0.0,0.265,0.33,136.002,country -Red Hot Chili Peppers,Dani California,282160,False,2006,76,0.556,0.913,0,-2.36,1,0.0437,0.0193,8.59e-06,0.346,0.73,96.184,rock -Scissor Sisters,I Don't Feel Like Dancin',288360,False,2006,63,0.707,0.923,7,-3.409,1,0.0276,0.0195,0.0,0.342,0.845,108.023,"pop, rock, Dance/Electronic" -Ne-Yo,So Sick,207186,False,2006,75,0.452,0.574,6,-8.336,1,0.31,0.246,0.0,0.189,0.58,92.791,"pop, R&B" -Hinder,Lips Of An Angel,261053,False,2019,35,0.474,0.744,2,-5.386,1,0.0341,0.0208,1.22e-06,0.209,0.238,129.005,"pop, rock, metal" -The Raconteurs,"Steady, As She Goes",215266,False,2006,56,0.525,0.578,9,-4.563,1,0.12,0.0132,0.00916,0.108,0.537,123.696,rock -The Fratellis,Chelsea Dagger,215306,False,2006,68,0.511,0.815,7,-3.14,1,0.144,0.0491,0.0,0.0826,0.586,154.514,rock -Fort Minor,Where'd You Go (feat. 
Holly Brook & Jonah Matranga),231866,True,2005,42,0.684,0.819,1,-3.309,0,0.238,0.262,0.00197,0.113,0.25,179.999,"hip hop, pop, rock" -The Pussycat Dolls,Stickwitu,207506,False,2005,67,0.548,0.554,9,-6.408,1,0.0587,0.283,0.0,0.0708,0.382,79.918,"pop, R&B" -Panic! At The Disco,I Write Sins Not Tragedies,185586,False,2005,70,0.566,0.815,9,-4.481,0,0.14,0.0737,0.0,0.12,0.672,169.961,rock -James Blunt,Goodbye My Lover,258653,False,2005,67,0.432,0.261,4,-12.591,1,0.0512,0.953,0.00342,0.0903,0.273,89.567,pop -Christina Aguilera,Ain't No Other Man,228906,False,2006,63,0.862,0.742,7,-4.722,1,0.222,0.00376,0.00855,0.103,0.511,127.91,pop -Westlife,The Rose,219106,False,2006,0,0.272,0.203,9,-9.706,1,0.0294,0.784,0.0,0.0805,0.172,109.581,pop -Keane,Is It Any Wonder?,186173,False,2006,59,0.489,0.955,11,-2.771,0,0.0363,3.82e-05,0.000178,0.334,0.816,129.352,pop -Orson,No Tomorrow,167493,False,2006,46,0.656,0.624,3,-4.623,0,0.0691,0.0065,0.0,0.278,0.735,124.082,pop -T-Pain,I'm N Luv (Wit a Stripper) (feat. 
Mike Jones),265333,True,2005,0,0.731,0.368,8,-10.38,1,0.0688,0.00544,0.0,0.193,0.512,145.171,"hip hop, pop, R&B" -KT Tunstall,Black Horse And The Cherry Tree,172373,False,2005,64,0.748,0.786,4,-7.788,0,0.0641,0.328,0.0,0.34,0.917,104.833,"World/Traditional, pop, Folk/Acoustic" -Natasha Bedingfield,Unwritten,259333,False,2004,2,0.706,0.8,5,-6.333,1,0.0399,0.00584,0.0,0.0822,0.629,100.011,pop -Razorlight,America,249760,False,2006,67,0.447,0.568,2,-4.975,1,0.0305,0.295,3.26e-06,0.355,0.149,90.933,"rock, pop" -James Morrison,You Give Me Something,213173,False,2006,64,0.559,0.688,7,-6.496,1,0.114,0.216,0.0,0.0855,0.578,79.753,"pop, R&B" -Snow Patrol,You're All I Have,273333,False,2006,35,0.467,0.921,2,-4.554,1,0.045,5.48e-05,0.0566,0.176,0.501,132.013,"World/Traditional, rock, pop" -The Fray,How to Save a Life,262533,False,2005,79,0.64,0.743,10,-4.08,1,0.0379,0.269,0.0,0.101,0.361,122.035,pop -Sandi Thom,I Wish I Was a Punk Rocker (with Flowers in My Hair),151640,False,2006,61,0.7,0.465,4,-6.815,1,0.358,0.544,0.0,0.606,0.719,108.102,"World/Traditional, pop" -My Chemical Romance,Welcome to the Black Parade,311106,False,2006,76,0.217,0.905,2,-4.103,1,0.0752,0.000289,0.00011,0.222,0.236,96.95,rock -Razorlight,In The Morning,222453,False,2006,59,0.616,0.855,4,-3.495,0,0.042,0.00379,0.000863,0.318,0.686,124.191,"rock, pop" -The All-American Rejects,Dirty Little Secret,193653,False,2005,65,0.469,0.955,10,-4.253,1,0.0432,0.000343,1.35e-06,0.548,0.462,143.853,"rock, pop" -James Morrison,Wonderful World,210066,False,2006,49,0.474,0.71,1,-4.606,1,0.029,0.0548,0.0,0.0974,0.591,78.969,"pop, R&B" -Chris Brown,Yo (Excuse Me Miss),229040,False,2005,69,0.536,0.612,4,-5.847,1,0.272,0.119,0.0,0.209,0.57,86.768,"hip hop, pop, R&B" -P!nk,Who Knew,208493,False,2006,0,0.688,0.734,9,-4.569,1,0.0274,0.00462,0.0,0.0756,0.46,140.004,pop -The Kooks,Naive,203506,False,2006,73,0.391,0.808,8,-6.209,0,0.0892,0.0759,0.0,0.149,0.737,100.304,rock -Leona Lewis,Bleeding 
Love,262466,False,2007,75,0.638,0.656,5,-5.886,1,0.0357,0.188,0.0,0.146,0.225,104.036,"pop, R&B" -Rihanna,Umbrella,275986,False,2008,81,0.583,0.829,1,-4.603,1,0.134,0.00864,0.0,0.0426,0.575,174.028,"hip hop, pop, R&B" -Britney Spears,Gimme More,251240,False,2007,79,0.788,0.844,2,-3.131,1,0.0334,0.25,0.000678,0.0723,0.382,113.324,pop -Fergie,Big Girls Don't Cry (Personal),268120,False,2006,76,0.708,0.641,7,-4.296,1,0.0335,0.205,0.0,0.0945,0.253,113.082,"pop, R&B" -Mark Ronson,Valerie (feat. Amy Winehouse) - Version Revisited,219413,False,2007,0,0.698,0.844,1,-4.789,1,0.0544,0.00253,0.000426,0.124,0.896,105.828,pop -Plain White T's,Hey There Delilah,232533,False,2005,78,0.656,0.291,2,-10.572,1,0.0293,0.872,0.0,0.114,0.298,103.971,pop -Justin Timberlake,What Goes Around.../...Comes Around (Interlude),448573,False,2006,70,0.687,0.723,7,-4.751,1,0.0709,0.122,0.00102,0.573,0.432,76.0,pop -Gwen Stefani,The Sweet Escape,246466,False,2006,71,0.756,0.77,1,-3.502,1,0.0343,0.191,0.0,0.178,0.73,119.961,"pop, R&B" -Timbaland,The Way I Are,179120,False,2007,80,0.731,0.807,3,-6.492,0,0.1,0.181,0.751,0.319,0.765,114.759,"hip hop, pop, R&B" -Nelly Furtado,Say It Right,223080,False,2006,81,0.872,0.872,1,-6.328,1,0.139,0.0476,0.00112,0.0543,0.809,116.948,"hip hop, pop, latin" -Ida Corr,Let Me Think About It,151973,False,2012,43,0.762,0.754,0,-3.425,0,0.046,0.00022,0.0665,0.146,0.715,129.026,set() -Alex Gaudino,Destination Calabria - Radio Edit,223111,False,2007,73,0.627,0.954,2,-4.804,1,0.0425,0.0015,0.000814,0.292,0.312,128.016,Dance/Electronic -September,Cry for You,209800,False,2007,65,0.767,0.881,9,-3.988,1,0.0301,0.00133,0.000139,0.0551,0.961,130.018,"pop, Dance/Electronic" -Akon,Smack That,212360,True,2006,58,0.94,0.743,5,-5.166,0,0.0475,0.317,0.0,0.0909,0.932,118.988,pop -Timbaland,Give It To Me,234026,True,2007,70,0.975,0.711,8,-3.904,1,0.0632,0.168,0.000552,0.0799,0.815,110.621,"hip hop, pop, R&B" -50 Cent,Ayo 
Technology,247946,True,2007,67,0.63,0.782,8,-5.44,0,0.132,0.0828,0.0,0.034,0.418,140.144,"hip hop, pop" -Eric Prydz,Proper Education - Radio Edit,198567,False,2007,0,0.537,0.937,0,-4.543,1,0.0523,0.00102,0.035,0.0917,0.324,124.938,"pop, Dance/Electronic" -Fedde Le Grand,Put Your Hands Up For Detroit - Radio Edit,150533,False,2015,0,0.827,0.931,7,-4.474,1,0.202,0.0153,0.21,0.0992,0.491,127.995,"pop, Dance/Electronic" -Ciara,Like a Boy,237053,False,2006,68,0.701,0.724,0,-5.751,0,0.145,0.267,0.0,0.0867,0.425,132.035,"pop, R&B" -Kanye West,Stronger,311866,True,2007,54,0.617,0.717,10,-7.858,0,0.153,0.00564,0.0,0.408,0.49,103.992,hip hop -Beyoncé,Beautiful Liar,199853,False,2007,64,0.764,0.751,8,-3.74,1,0.0703,0.00554,0.000237,0.164,0.418,91.977,"pop, R&B" -Gym Class Heroes,Cupid's Chokehold / Breakfast in America - Radio Mix,237173,False,2007,59,0.584,0.681,1,-5.084,0,0.0781,0.495,0.0,0.289,0.821,79.702,"hip hop, pop" -Akon,I Wanna Love You,247066,True,2006,53,0.865,0.45,0,-9.387,1,0.0417,0.0359,1.28e-06,0.308,0.352,99.989,pop -Soulja Boy,Crank That (Soulja Boy),221933,False,2007,74,0.736,0.74,0,-2.18,1,0.0786,0.515,0.0,0.0468,0.803,140.141,"hip hop, pop" -Fat Joe,Make It Rain,247413,True,2006,59,0.744,0.697,11,-5.063,0,0.197,0.0106,0.0,0.0842,0.767,149.28,"hip hop, pop" -P!nk,U + Ur Hand,214386,True,2006,0,0.709,0.891,0,-3.688,1,0.0528,0.00144,0.0,0.034,0.886,141.04,pop -Rihanna,Shut Up And Drive,212280,False,2008,69,0.735,0.826,1,-4.902,1,0.0349,0.00101,0.129,0.197,0.74,132.964,"hip hop, pop, R&B" -Lloyd,Get It Shawty,209533,False,2007,62,0.822,0.905,5,-4.032,0,0.241,0.123,0.0,0.0496,0.674,128.014,"hip hop, pop, R&B" -Kanye West,Can't Tell Me Nothing,271600,True,2007,47,0.596,0.62,5,-6.133,0,0.039,0.0122,0.0,0.82,0.102,80.029,hip hop -Camille Jones,The Creeps - Fedde Le Grand Radio Mix,152333,False,2006,17,0.847,0.861,6,-6.632,1,0.0603,0.278,0.029,0.0626,0.724,127.894,set() -Linkin Park,What I've 
Done,205613,False,2007,77,0.623,0.93,5,-5.285,1,0.0324,0.0141,1.64e-06,0.138,0.287,120.119,"rock, metal" -Keyshia Cole,Last Night,255706,False,2007,60,0.918,0.857,3,-5.032,0,0.0623,0.166,0.0003,0.0855,0.972,121.006,"hip hop, pop, R&B" -Chris Brown,Kiss Kiss (feat. T-Pain),250666,False,2007,68,0.729,0.658,10,-3.386,0,0.225,0.0506,0.0,0.0693,0.551,140.043,"hip hop, pop, R&B" -Shop Boyz,Party Like A Rock Star,253400,True,2007,61,0.619,0.709,7,-4.633,1,0.0395,0.00541,0.0,0.507,0.68,144.972,"hip hop, pop" -Finger Eleven,Paralyzer,208106,False,2007,73,0.644,0.939,11,-3.486,0,0.0456,0.157,0.0,0.233,0.861,106.031,"rock, metal" -Justin Timberlake,Summer Love,252973,False,2006,66,0.606,0.591,0,-7.228,0,0.087,0.0756,0.0,0.328,0.477,95.802,pop -MiMS,This Is Why I'm Hot,253706,True,2007,59,0.621,0.6,8,-6.349,1,0.576,0.277,0.0,0.495,0.485,80.021,"hip hop, pop" -Fabolous,Make Me Better,253573,True,2007,62,0.607,0.599,11,-6.886,1,0.0995,0.33,0.0,0.199,0.562,86.491,"hip hop, pop, R&B" -Seether,Fake It,193893,True,2007,73,0.611,0.95,8,-3.509,1,0.0518,0.00141,1.59e-06,0.0543,0.607,132.078,"rock, metal" -Take That,Patience,202066,False,2006,64,0.309,0.783,10,-4.154,1,0.0405,0.142,1.11e-06,0.118,0.372,175.788,pop -Arctic Monkeys,Brianstorm,172866,False,2007,67,0.42,0.974,1,-4.706,1,0.191,8.15e-05,0.00155,0.0871,0.463,165.182,rock -Justin Timberlake,My Love (feat. 
T.I.),276160,False,2006,72,0.771,0.68,11,-5.881,0,0.224,0.277,1.18e-05,0.682,0.808,119.952,pop -Fergie,Glamorous,246600,True,2006,64,0.811,0.757,0,-6.447,1,0.23,0.298,0.0,0.103,0.565,130.993,"pop, R&B" -Linkin Park,Bleed It Out,166373,True,2007,73,0.529,0.967,1,-3.647,0,0.0508,0.114,0.0,0.61,0.596,140.127,"rock, metal" -My Chemical Romance,Teenagers,161920,False,2006,80,0.463,0.857,4,-3.063,1,0.0632,0.0506,0.0,0.184,0.856,111.647,rock -Calvin Harris,Acceptable in the 80's,333680,False,2007,57,0.787,0.808,7,-5.454,1,0.0511,0.0143,0.257,0.0466,0.942,127.99,"hip hop, pop, Dance/Electronic" -Sugababes,About You Now,212400,False,2007,66,0.584,0.699,1,-6.065,0,0.0378,6.52e-05,6.05e-05,0.486,0.585,82.499,"pop, R&B" -Avril Lavigne,Girlfriend,216600,True,2007,75,0.56,0.959,7,-2.433,1,0.102,0.000722,0.000221,0.209,0.669,163.983,pop -Ne-Yo,Because Of You,266840,False,2007,74,0.81,0.538,0,-5.784,0,0.0356,0.528,0.0,0.0951,0.828,109.97,"pop, R&B" -Daughtry,It's Not Over,215173,False,2016,45,0.45,0.921,10,-3.476,0,0.0538,0.0467,0.0,0.311,0.413,145.959,"pop, rock, metal" -Unk,Walk It Out,173040,True,2013,48,0.794,0.777,1,-3.696,0,0.0813,0.0982,0.0,0.202,0.706,160.022,"hip hop, pop" -Lloyd,You,273133,False,2007,70,0.412,0.876,5,-6.008,0,0.47,0.102,0.0,0.297,0.703,130.159,"hip hop, pop, R&B" -Klaxons,Golden Skans,165120,False,2007,65,0.463,0.836,2,-2.776,0,0.0381,0.000418,7.38e-06,0.25,0.713,141.955,rock -T-Pain,Bartender (feat. 
Akon),238800,True,2007,3,0.675,0.394,8,-8.539,1,0.133,0.0611,0.0,0.214,0.405,104.987,"hip hop, pop, R&B" -Baby Boy Da Prince,The Way I Live - Main Explicit,324906,True,2006,59,0.88,0.438,0,-7.562,1,0.248,0.0147,0.0,0.0907,0.714,90.02,"hip hop, pop" -Fergie,Fergalicious,292373,True,2006,63,0.906,0.584,8,-7.72,0,0.316,0.0576,0.0,0.128,0.831,129.055,"pop, R&B" -Omarion,Ice Box,256426,False,2006,57,0.775,0.731,8,-5.446,1,0.134,0.189,0.0,0.129,0.821,131.105,"hip hop, pop, R&B" -Pretty Ricky,On the Hotline - Amended Version,242586,False,2007,57,0.704,0.854,10,-5.477,0,0.183,0.0185,0.0,0.148,0.688,92.988,"hip hop, pop, R&B" -Nickelback,Rockstar,252040,False,2005,69,0.616,0.91,0,-3.004,1,0.0386,0.0459,0.0,0.343,0.693,144.072,"rock, metal" -Fall Out Boy,Thnks fr th Mmrs,203506,False,2007,76,0.459,0.891,10,-5.057,0,0.0623,0.00511,0.0,0.106,0.588,154.837,rock -Justin Timberlake,LoveStoned / I Think She Knows (Interlude),444333,True,2006,57,0.859,0.681,1,-6.247,1,0.0549,0.34,0.000161,0.213,0.852,121.243,pop -All Time Low,"Dear Maria, Count Me In",182826,False,2007,76,0.459,0.895,2,-3.126,1,0.0805,0.00725,0.0,0.206,0.572,181.04,rock -Amy Winehouse,Back To Black,241293,True,2006,76,0.403,0.422,7,-13.964,0,0.0373,0.134,2.05e-05,0.0861,0.378,122.728,R&B -Keyshia Cole,Let It Go,238360,False,2007,58,0.808,0.721,1,-5.165,1,0.213,0.197,0.0,0.205,0.773,94.955,"hip hop, pop, R&B" -Ciara,Promise,267413,False,2006,56,0.697,0.629,9,-5.757,1,0.0425,0.409,3.8e-06,0.0819,0.439,123.279,"pop, R&B" -Enrique Iglesias,Do You Know? 
(The Ping Pong Song),219533,False,2007,45,0.753,0.759,8,-4.829,1,0.0487,0.0523,0.0,0.0263,0.706,114.999,"pop, latin" -T2,Heartbroken - Edit,178613,False,2017,59,0.716,0.757,11,-5.438,0,0.0329,0.0258,0.00392,0.16,0.877,139.986,Dance/Electronic -Robyn,With Every Heartbeat,254920,False,2005,36,0.477,0.865,4,-4.462,0,0.0395,0.00877,4.52e-05,0.271,0.413,120.872,"pop, Dance/Electronic" -Maroon 5,Makes Me Wonder,211080,True,2007,62,0.803,0.851,7,-2.429,1,0.0357,0.00485,0.000358,0.0577,0.881,113.996,pop -Fall Out Boy,"This Ain't A Scene, It's An Arms Race",212040,False,2007,68,0.435,0.887,9,-4.07,1,0.0636,0.000747,0.0,0.0327,0.408,199.935,rock -Paramore,Misery Business,211520,False,2007,72,0.517,0.906,1,-3.677,1,0.0735,0.00272,9.26e-06,0.113,0.731,172.977,"pop, rock" -Hinder,Better Than Me,223533,False,2019,30,0.451,0.682,2,-5.295,0,0.029,0.205,0.0,0.175,0.254,139.913,"pop, rock, metal" -Huey,"Pop, Lock & Drop It - Video Edit",261026,False,2006,52,0.723,0.644,11,-6.863,0,0.222,0.00308,0.0,0.352,0.645,144.09,"hip hop, pop" -Foo Fighters,The Pretender,269373,False,2007,78,0.433,0.959,9,-4.04,1,0.0431,0.000917,0.0,0.028,0.365,172.984,"rock, metal" -Plies,Shawty (feat. T Pain),255413,True,2007,61,0.444,0.708,4,-5.926,0,0.126,0.0419,0.0,0.0775,0.26,105.412,"hip hop, pop, R&B" -Kaiser Chiefs,Ruby,204200,False,2007,67,0.451,0.939,5,-2.82,0,0.0512,0.00673,4.85e-06,0.0774,0.448,93.416,rock -Mark Ronson,Stop Me (feat. Daniel Merriweather),232946,False,2007,46,0.626,0.906,0,-4.334,1,0.0403,0.00128,8.52e-06,0.137,0.346,118.035,pop -Bow Wow,Outta My System (feat. 
T-Pain & Johntá Austin),238266,False,2006,57,0.772,0.59,6,-6.957,1,0.183,0.0917,0.0,0.101,0.743,83.998,"hip hop, pop, R&B" -Jim Jones,We Fly High,236080,True,2006,0,0.685,0.8,1,-6.564,1,0.298,0.0847,0.0,0.092,0.483,120.226,"hip hop, pop" -The Red Jumpsuit Apparatus,Face Down,192000,False,2006,74,0.545,0.932,7,-2.189,0,0.0399,0.000665,0.0,0.127,0.464,92.956,pop -MIKA,Grace Kelly,187733,False,2006,69,0.675,0.828,0,-5.799,1,0.0454,0.0242,0.0102,0.364,0.669,122.229,pop -Christina Aguilera,Candyman,194213,False,2006,66,0.686,0.789,8,-4.713,0,0.23,0.0125,0.0147,0.142,0.72,172.976,pop -Sean Kingston,Beautiful Girls,225373,False,2007,78,0.762,0.661,1,-6.075,0,0.0687,0.15,0.0,0.256,0.769,130.009,"hip hop, pop, R&B" -Akon,Don't Matter,293066,True,2006,52,0.801,0.454,2,-6.035,1,0.0371,0.225,0.0,0.226,0.34,125.139,pop -James Blunt,1973,280026,False,2007,68,0.72,0.668,9,-7.928,1,0.0269,0.0652,0.00664,0.0789,0.768,123.007,pop -Carrie Underwood,Before He Cheats,199946,False,2005,76,0.519,0.749,6,-3.318,0,0.0405,0.271,0.0,0.119,0.29,147.905,"pop, country" -Just Jack,Starz In Their Eyes,295933,False,2006,52,0.67,0.8,3,-6.731,1,0.0652,0.00184,0.000356,0.466,0.656,123.802,Dance/Electronic -T-Pain,Buy U a Drank (Shawty Snappin') (feat. Yung Joc),227960,False,2007,2,0.451,0.55,1,-8.137,1,0.262,0.0108,0.0,0.0737,0.594,80.001,"hip hop, pop, R&B" -Bone Thugs-N-Harmony,I Tried,287480,True,2007,61,0.727,0.71,4,-6.142,1,0.0742,0.0104,0.0,0.107,0.378,81.995,"hip hop, pop" -Avril Lavigne,When You're Gone,240493,False,2007,68,0.457,0.719,4,-3.921,0,0.0323,0.191,0.0,0.228,0.168,142.034,pop -Rich Boy,Throw Some D's,263920,True,2007,57,0.697,0.86,11,-5.181,1,0.43,0.0368,0.0,0.0721,0.433,160.895,"hip hop, pop" -MIKA,Love Today,235173,False,2006,57,0.673,0.913,4,-4.981,1,0.0664,0.0304,0.00298,0.113,0.587,124.484,pop -Manic Street Preachers,Your Love Alone Is Not Enough (feat. 
Nina Persson),235693,False,2007,53,0.344,0.921,2,-3.049,1,0.0459,0.00143,1.13e-05,0.38,0.396,126.766,"rock, pop" -Elliott Yamin,Wait for You,261320,False,2007,58,0.764,0.487,0,-6.734,1,0.0281,0.25,0.0,0.184,0.352,116.027,set() -Wyclef Jean,"Sweetest Girl (Dollar Bill) (feat. Akon, Lil' Wayne & Niia)",241133,False,2007,56,0.733,0.744,1,-3.51,1,0.0553,0.0772,0.0,0.341,0.529,92.515,set() -Boys Like Girls,The Great Escape,206520,False,2007,62,0.423,0.94,1,-4.012,0,0.0635,0.00166,0.0,0.178,0.505,149.934,pop -Take That,Rule The World - Radio Edit,237760,False,2007,58,0.357,0.774,2,-4.226,1,0.034,0.0376,0.0,0.348,0.348,164.054,pop -J. Holiday,Bed,275106,False,2007,64,0.684,0.606,5,-7.268,0,0.0504,0.17,0.0,0.058,0.723,127.901,"hip hop, pop, R&B, Dance/Electronic" -Red Hot Chili Peppers,Snow (Hey Oh),334666,False,2006,79,0.427,0.9,11,-3.674,1,0.0499,0.116,1.75e-05,0.119,0.599,104.655,rock -Ludacris,Runaway Love,280680,True,2006,51,0.405,0.721,1,-6.97,1,0.283,0.329,0.0,0.252,0.815,92.854,"hip hop, pop" -Flo Rida,Low (feat. 
T-Pain),231400,False,2008,80,0.918,0.609,10,-5.64,0,0.0791,0.0928,0.0,0.139,0.304,128.008,"hip hop, pop" -Katy Perry,I Kissed A Girl,179640,False,2008,73,0.699,0.76,5,-3.173,1,0.0677,0.00223,0.0,0.132,0.696,129.996,pop -Rihanna,Take A Bow,229413,False,2008,74,0.697,0.467,9,-7.536,1,0.0715,0.248,0.0,0.0941,0.572,82.082,"hip hop, pop, R&B" -Alicia Keys,No One,253813,False,2007,77,0.644,0.549,1,-5.415,0,0.0285,0.0209,8.85e-06,0.134,0.167,90.04,"pop, R&B" -Timbaland,Apologize,184400,False,2007,74,0.653,0.604,8,-6.017,1,0.0278,0.0292,0.0,0.097,0.101,118.016,"hip hop, pop, R&B" -Ne-Yo,Miss Independent,232000,False,2008,71,0.668,0.673,1,-5.714,1,0.145,0.48,0.0,0.194,0.727,171.812,"pop, R&B" -Chris Brown,With You,252120,False,2007,70,0.662,0.693,3,-4.298,1,0.0698,0.134,0.0,0.145,0.655,86.009,"hip hop, pop, R&B" -Beyoncé,If I Were a Boy,249146,False,2008,66,0.632,0.518,6,-6.126,1,0.0313,0.107,0.0,0.354,0.427,90.007,"pop, R&B" -Kardinal Offishall,Dangerous,246053,True,2008,75,0.949,0.79,8,-5.957,1,0.0756,0.00437,0.0,0.0816,0.807,117.002,"hip hop, pop, R&B" -Madcon,Beggin (original version),216146,False,2007,70,0.715,0.8,4,-5.144,0,0.057,0.0271,0.0,0.0648,0.445,129.023,"hip hop, pop" -September,Cry for You,209800,False,2007,65,0.767,0.881,9,-3.988,1,0.0301,0.00133,0.000139,0.0551,0.961,130.018,"pop, Dance/Electronic" -Rihanna,Disturbia,238626,False,2008,76,0.707,0.813,11,-4.515,0,0.0571,0.0863,0.0,0.168,0.722,124.921,"hip hop, pop, R&B" -Guru Josh Project,Infinity 2008,190013,False,2011,1,0.493,0.849,7,-6.139,0,0.0576,0.000337,0.00672,0.355,0.483,127.999,hip hop -Britney Spears,Break the Ice,196053,False,2007,61,0.712,0.911,5,-3.866,0,0.0445,0.689,8.02e-06,0.107,0.85,117.533,pop -Lil Wayne,Lollipop,299333,True,2008,69,0.829,0.428,0,-9.469,1,0.0831,0.056,0.00413,0.137,0.45,148.075,"hip hop, pop" -Basshunter,Now You're Gone - Video Edit,148186,False,2008,63,0.639,0.976,1,-5.503,1,0.354,0.0213,0.0,0.0856,0.354,147.99,pop -Kanye West,Flashing 
Lights,237506,True,2007,52,0.639,0.628,6,-7.578,0,0.0399,0.0381,0.0,0.386,0.43,90.482,hip hop -Madonna,4 Minutes (feat. Justin Timberlake & Timbaland),189693,False,2009,71,0.753,0.931,2,-4.922,1,0.0652,0.00994,0.00696,0.234,0.767,113.029,pop -Katy Perry,Hot N Cold,220226,False,2008,73,0.706,0.841,7,-3.956,1,0.0418,7.95e-05,0.0,0.0688,0.861,132.032,pop -Rihanna,Don't Stop The Music,267080,False,2008,77,0.835,0.669,6,-5.582,0,0.0643,0.0336,6.92e-05,0.0535,0.542,122.668,"hip hop, pop, R&B" -Snoop Dogg,Sensual Seduction,245520,False,2007,56,0.756,0.829,11,-3.973,1,0.0682,0.003,3.46e-05,0.0578,0.504,120.163,"hip hop, pop" -Akon,Right Now (Na Na Na),240746,False,2008,69,0.83,0.857,8,-4.194,0,0.152,0.262,0.0,0.413,0.607,137.982,pop -Britney Spears,Womanizer,224400,False,2008,76,0.724,0.695,11,-5.226,1,0.0622,0.073,0.0,0.0889,0.235,139.0,pop -Eric Prydz,Pjanoo - Radio Edit,157432,False,2008,65,0.605,0.874,7,-4.949,0,0.0295,0.000563,0.812,0.112,0.836,125.99,"pop, Dance/Electronic" -Kelly Rowland,Work,207920,False,2010,34,0.888,0.823,10,-6.351,0,0.0538,0.00499,0.0,0.291,0.623,102.757,"hip hop, pop, R&B" -Jeezy,Put On,321293,True,2008,69,0.654,0.77,9,-6.091,0,0.0353,0.00342,0.0,0.0887,0.272,137.616,"hip hop, pop" -Madonna,Give It 2 Me,287906,False,2008,57,0.837,0.954,8,-3.512,0,0.0414,0.0933,0.000405,0.143,0.972,127.019,pop -The Pussycat Dolls,When I Grow Up,245680,False,2008,72,0.671,0.685,11,-5.762,0,0.05,0.00165,0.0,0.398,0.368,118.449,"pop, R&B" -will.i.am,Heartbreaker,327880,True,2007,46,0.788,0.549,9,-8.793,0,0.0732,0.0352,0.127,0.147,0.449,120.051,"hip hop, pop" -Soulja Boy,Crank That (Soulja Boy),221933,False,2007,74,0.736,0.74,0,-2.18,1,0.0786,0.515,0.0,0.0468,0.803,140.141,"hip hop, pop" -Britney Spears,Piece of Me,212106,False,2007,64,0.769,0.638,11,-5.054,1,0.216,0.0902,0.0,0.0857,0.782,115.007,pop -T.I.,Live Your Life,338853,True,2008,75,0.375,0.862,11,-3.363,0,0.255,0.071,0.0,0.211,0.478,159.841,"hip hop, pop" 
-Ne-Yo,Closer,234360,False,2008,65,0.709,0.745,4,-6.437,0,0.0738,0.0225,5.2e-05,0.154,0.576,126.027,"pop, R&B" -Lil Wayne,A Milli,221840,True,2008,69,0.674,0.695,6,-8.636,0,0.278,0.0387,0.00202,0.194,0.773,151.486,"hip hop, pop" -Colby O'Donis,What You Got,243013,False,2008,61,0.775,0.641,1,-6.718,1,0.038,0.0327,0.0,0.18,0.305,119.974,"hip hop, pop, R&B" -Miley Cyrus,See You Again,190453,False,2007,0,0.692,0.911,9,-5.098,0,0.177,0.0149,7.34e-05,0.112,0.801,138.975,pop -Basshunter,All I Ever Wanted - Radio Edit,176453,False,2008,65,0.645,0.984,4,-7.051,1,0.0508,0.164,0.00701,0.164,0.553,144.954,pop -Chris Brown,Kiss Kiss (feat. T-Pain),250666,False,2007,68,0.729,0.658,10,-3.386,0,0.225,0.0506,0.0,0.0693,0.551,140.043,"hip hop, pop, R&B" -Finger Eleven,Paralyzer,208106,False,2007,73,0.644,0.939,11,-3.486,0,0.0456,0.157,0.0,0.233,0.861,106.031,"rock, metal" -Alesha Dixon,The Boy Does Nothing,210680,False,2008,51,0.632,0.972,3,-2.423,0,0.121,0.121,7.74e-05,0.0317,0.845,87.0,pop -Dizzee Rascal,Dance Wiv Me - Radio Edit,204093,False,2011,68,0.878,0.746,11,-4.281,1,0.0451,0.0476,0.0,0.154,0.792,111.996,"hip hop, pop, Dance/Electronic" -Gabriella Cilmi,Sweet About Me,202133,False,2008,56,0.671,0.701,6,-5.035,1,0.0288,0.0607,2.7e-06,0.163,0.585,131.977,R&B -Jack White,Another Way to Die,262240,False,2007,56,0.486,0.765,4,-4.23,0,0.128,0.0239,3.02e-05,0.11,0.386,141.933,"rock, blues" -Plies,Hypnotized (feat. 
Akon),188493,True,2007,66,0.84,0.571,10,-7.421,1,0.0797,0.16,0.0,0.406,0.617,116.59,"hip hop, pop, R&B" -The Pussycat Dolls,I Hate This Part,218400,False,2008,64,0.756,0.612,5,-4.371,1,0.0317,0.0659,0.0,0.274,0.452,111.572,"pop, R&B" -Sean Kingston,Take You There,236693,False,2008,65,0.752,0.921,5,-4.321,0,0.0696,0.0182,0.0,0.256,0.634,115.033,"hip hop, pop, R&B" -Natasha Bedingfield,Pocketful of Sunshine,203440,False,2008,62,0.726,0.881,9,-3.892,0,0.0391,0.203,0.0,0.108,0.682,110.019,pop -The Offspring,"You're Gonna Go Far, Kid",177826,True,2008,78,0.55,0.917,0,-3.159,1,0.0638,0.00428,0.0,0.197,0.601,126.115,"rock, pop, metal" -Kanye West,Homecoming,203493,True,2007,47,0.667,0.747,1,-7.059,1,0.189,0.337,0.0,0.115,0.918,86.917,hip hop -Estelle,American Boy,284733,True,2008,78,0.727,0.729,0,-2.99,1,0.326,0.171,0.0,0.07,0.512,117.932,R&B -Saving Abel,Addicted,222826,True,2008,69,0.512,0.864,7,-4.146,1,0.0338,0.000821,0.0,0.0982,0.527,138.018,"rock, metal" -Pendulum,Propane Nightmares,313346,False,2008,57,0.356,0.966,7,-4.13,0,0.0825,0.000262,0.148,0.192,0.215,173.992,Dance/Electronic -The Game,My Life,320893,True,2008,60,0.673,0.766,11,-5.018,1,0.349,0.0724,0.0,0.0563,0.382,148.113,"hip hop, pop" -Danity Kane,Damaged,244266,False,2008,57,0.774,0.653,8,-5.158,0,0.05,0.0912,0.0,0.151,0.818,120.01,"pop, R&B" -P!nk,So What,215160,True,2008,76,0.534,0.87,11,-3.078,0,0.0425,0.000334,0.0,0.241,0.462,126.019,pop -Kanye West,Love Lockdown,270306,False,2008,66,0.76,0.524,1,-7.67,0,0.0323,0.0542,0.5,0.112,0.112,119.603,hip hop -Gym Class Heroes,Cookie Jar (feat. 
The-Dream),216317,True,2008,54,0.687,0.668,5,-4.061,1,0.0282,0.0253,0.0,0.744,0.418,113.053,"hip hop, pop" -M.I.A.,Paper Planes,205200,False,2007,1,0.447,0.848,2,-6.175,1,0.222,0.033,7.45e-05,0.65,0.485,172.247,"pop, rock" -3 Doors Down,It's Not My Time,241960,False,2008,56,0.529,0.934,0,-4.808,1,0.0602,0.00153,5.16e-06,0.118,0.282,127.962,"pop, rock, metal" -Kylie Minogue,Wow,190973,False,2007,47,0.654,0.884,9,-5.466,1,0.202,0.151,0.000268,0.379,0.851,124.072,"pop, Dance/Electronic" -Mariah Carey,Touch My Body,204733,False,2008,0,0.715,0.665,4,-7.736,0,0.0502,0.0996,0.0,0.224,0.84,78.502,"pop, R&B" -MGMT,Kids,302840,False,2007,77,0.451,0.931,9,-3.871,1,0.0719,0.00076,0.0049,0.361,0.172,122.961,rock -The Ting Tings,Shut Up and Let Me Go,171226,False,2007,55,0.852,0.927,7,-4.497,1,0.0581,0.0109,0.00268,0.054,0.887,107.993,"pop, rock, Dance/Electronic" -Lil Wayne,Got Money,244626,True,2008,58,0.694,0.661,7,-4.847,1,0.118,0.00188,0.0,0.676,0.702,82.48,"hip hop, pop" -Santana,Into the Night (feat. Chad Kroeger),222440,False,2007,57,0.595,0.844,7,-4.678,1,0.0331,0.0137,0.0,0.234,0.603,127.981,"rock, blues, latin" -The-Dream,I Luv Your Girl,267866,True,2007,62,0.723,0.322,8,-9.702,1,0.0341,0.014,0.0,0.119,0.0406,90.063,"hip hop, pop, R&B" -Wiley,Wearing My Rolex - Radio Edit,170480,False,2008,59,0.876,0.716,1,-6.884,1,0.135,0.0468,0.000482,0.0667,0.755,131.942,"hip hop, Dance/Electronic" -Usher,Love in This Club (feat. Young Jeezy),259720,False,2008,71,0.573,0.712,0,-5.976,1,0.0732,0.0572,0.0,0.167,0.346,140.012,"hip hop, pop, R&B" -Sam Sparro,Black & Gold - Radio Edit,212360,False,2008,53,0.383,0.703,4,-5.65,0,0.0763,0.00116,2.39e-05,0.112,0.42,135.968,"pop, Dance/Electronic" -"H ""two"" O",What's It Gonna Be (feat. 
Platnum),207476,False,2008,55,0.733,0.9,9,-4.758,1,0.0341,0.0085,0.0218,0.151,0.937,139.61,Dance/Electronic -The Ting Tings,That's Not My Name,310573,False,2008,51,0.755,0.901,9,-3.152,1,0.0893,0.0451,0.0373,0.363,0.959,145.042,"pop, rock, Dance/Electronic" -Plies,"Bust It Baby, Pt. 2 (feat. Ne-Yo)",240760,True,2008,61,0.648,0.801,5,-7.24,0,0.167,0.154,0.0,0.339,0.807,78.946,"hip hop, pop, R&B" -James Morrison,Broken Strings,250453,False,2008,70,0.57,0.717,1,-4.914,1,0.029,0.00967,0.0,0.0833,0.321,111.91,"pop, R&B" -Leona Lewis,Run,314720,False,2007,62,0.285,0.462,5,-6.166,0,0.0291,0.428,5.61e-06,0.122,0.0923,142.365,"pop, R&B" -Metro Station,Shake It,179946,False,2007,67,0.618,0.955,4,-3.836,1,0.0798,0.00221,3.09e-06,0.486,0.79,150.034,"pop, rock" -T.I.,Whatever You Like,249533,True,2008,74,0.68,0.687,9,-6.162,0,0.0709,0.0161,0.0,0.261,0.467,150.053,"hip hop, pop" -Trey Songz,Can't Help but Wait,206413,False,2007,62,0.699,0.699,8,-5.564,0,0.0737,0.358,0.0,0.0912,0.611,94.977,"hip hop, pop, R&B" -OneRepublic,Stop And Stare,223853,False,2007,59,0.492,0.859,4,-4.274,1,0.0332,0.0659,0.0,0.0756,0.251,92.474,pop -Alexandra Burke,Hallelujah,217826,False,2009,63,0.177,0.425,2,-6.211,0,0.0291,0.654,0.0,0.195,0.0942,182.571,pop -Mary J. 
Blige,Just Fine,242133,False,2007,64,0.923,0.795,11,-3.61,0,0.117,0.0394,0.000565,0.101,0.588,123.021,"pop, R&B" -Jordin Sparks,One Step At a Time,205160,False,2007,56,0.766,0.692,1,-4.672,1,0.0289,0.0825,0.0,0.0384,0.691,102.028,"pop, R&B" -Coldplay,Viva La Vida,242373,False,2008,80,0.486,0.617,5,-7.115,0,0.0287,0.0954,3.23e-06,0.109,0.417,138.015,"rock, pop" -Kings of Leon,Sex on Fire,203346,False,2008,80,0.542,0.905,9,-5.653,1,0.054,0.00172,0.0104,0.136,0.374,153.398,rock -Kanye West,Good Life,207000,True,2007,45,0.439,0.808,1,-6.881,1,0.346,0.00305,0.0,0.439,0.487,82.962,hip hop -Duffy,Mercy,219920,False,2008,69,0.793,0.859,0,-3.774,1,0.0332,0.266,0.000356,0.133,0.964,129.911,"pop, R&B" -David Archuleta,Crush,213520,False,2008,67,0.57,0.664,0,-4.718,0,0.0322,0.00836,1.2e-05,0.0719,0.487,162.084,pop -J. Holiday,Suffocate - Superclean,220053,False,2007,58,0.458,0.445,9,-8.391,0,0.361,0.64,0.0,0.118,0.447,82.82,"hip hop, pop, R&B, Dance/Electronic" -Gavin DeGraw,In Love With a Girl,206000,False,2008,51,0.438,0.906,3,-4.8,1,0.0438,0.0191,0.0,0.373,0.684,161.905,"pop, Folk/Acoustic" -Jordin Sparks,Tattoo,233466,False,2007,54,0.566,0.766,2,-5.036,1,0.0399,0.431,0.0,0.101,0.547,168.005,"pop, R&B" -Coldplay,Violet Hill,222653,False,2008,63,0.33,0.58,1,-7.875,0,0.0374,0.0614,0.00129,0.115,0.11,76.093,"rock, pop" -Ray J,Sexy Can I feat. Yung Berg,204040,True,2008,1,0.575,0.684,1,-6.007,1,0.325,0.299,0.0,0.614,0.926,172.155,"hip hop, pop, R&B" -Jennifer Hudson,Spotlight,250106,False,2008,60,0.707,0.724,11,-3.887,0,0.051,0.0448,0.0,0.072,0.659,108.984,"pop, R&B, easy listening" -John Legend,Green Light (feat. André 3000),284186,False,2008,55,0.622,0.882,9,-5.59,0,0.119,0.0912,0.0,0.216,0.762,77.506,"pop, R&B" -Jonas Brothers,Burnin' Up,175093,False,2008,68,0.667,0.954,2,-3.462,1,0.0817,0.0296,0.0,0.331,0.807,114.03,pop -Lupe Fiasco,Superstar (feat. 
Matthew Santos),289000,False,2007,64,0.572,0.822,11,-6.015,0,0.361,0.207,0.0,0.359,0.472,94.812,"hip hop, pop" -Chris Brown,Forever,278573,False,2008,74,0.672,0.82,11,-4.456,1,0.0459,0.0368,0.000188,0.184,0.438,120.005,"hip hop, pop, R&B" -Adele,Chasing Pavements,210506,False,2008,1,0.614,0.47,5,-6.09,0,0.0255,0.291,0.0,0.111,0.329,80.045,"pop, R&B" -Leona Lewis,Better in Time,234173,False,2007,67,0.584,0.7,6,-4.251,1,0.0506,0.512,2.43e-05,0.13,0.549,163.953,"pop, R&B" -Fergie,Clumsy,240426,False,2006,58,0.731,0.563,2,-4.046,1,0.131,0.191,0.00042,0.296,0.452,184.009,"pop, R&B" -T-Pain,Can't Believe It (feat. Lil' Wayne),273826,False,2008,0,0.648,0.516,10,-8.869,0,0.049,0.00179,0.0,0.0772,0.0756,89.828,"hip hop, pop, R&B" -Black Eyed Peas,I Gotta Feeling,289133,False,2009,80,0.743,0.766,0,-6.375,1,0.0265,0.0873,0.0,0.509,0.61,127.96,"hip hop, pop" -Lady Gaga,Poker Face,237200,False,2008,77,0.851,0.806,4,-4.62,1,0.0787,0.118,1.64e-06,0.121,0.787,118.999,pop -Beyoncé,Halo,261640,False,2008,74,0.508,0.72,11,-5.908,0,0.0628,0.272,0.0,0.0563,0.472,79.983,"pop, R&B" -David Guetta,Sexy Bitch (feat. 
Akon),195853,True,2010,75,0.813,0.627,11,-5.018,0,0.0486,0.0771,0.000616,0.131,0.801,130.011,"hip hop, pop, Dance/Electronic" -Flo Rida,Right Round,204640,False,2009,74,0.72,0.672,7,-6.852,1,0.0551,0.009,0.0,0.232,0.705,124.986,"hip hop, pop" -Taylor Swift,Love Story,236266,False,2008,74,0.617,0.741,2,-3.97,1,0.0311,0.131,0.0,0.0772,0.306,118.984,pop -Pitbull,Hotel Room Service,238506,False,2009,63,0.849,0.599,6,-8.164,1,0.227,0.00301,0.000249,0.0763,0.761,126.003,"hip hop, pop, latin" -Britney Spears,Circus,192360,False,2008,74,0.791,0.733,6,-5.215,0,0.052,0.147,0.000381,0.0713,0.761,114.98,pop -Lady Gaga,LoveGame,216333,False,2008,69,0.894,0.678,6,-5.611,0,0.0523,0.00569,2.43e-06,0.317,0.844,105.024,pop -Skillet,Monster,178013,False,2009,75,0.64,0.957,8,-2.336,1,0.0741,0.0431,0.0,0.0789,0.692,134.992,"rock, pop, metal" -The Prodigy,Omen,216026,False,2009,57,0.545,0.953,7,-5.172,1,0.0441,0.000941,0.117,0.281,0.558,140.002,"rock, pop, Dance/Electronic" -Black Eyed Peas,Meet Me Halfway,284373,False,2009,72,0.798,0.629,11,-6.857,0,0.0735,0.00474,2.17e-05,0.324,0.4,130.0,"hip hop, pop" -Kanye West,Heartless,211000,False,2008,82,0.79,0.647,10,-5.983,0,0.136,0.0515,0.0,0.248,0.654,87.999,hip hop -Kid Cudi,Day 'N' Nite (Nightmare),221240,False,2009,72,0.88,0.443,11,-6.359,0,0.0653,0.462,7.13e-06,0.122,0.803,138.018,hip hop -Lady Gaga,Just Dance,241933,False,2008,76,0.822,0.739,1,-4.541,0,0.0311,0.0264,4.26e-05,0.181,0.745,118.992,pop -Britney Spears,If U Seek Amy,216520,False,2008,69,0.717,0.587,4,-7.296,0,0.0339,0.0192,0.0,0.0523,0.544,129.954,pop -Pitbull,I Know You Want Me (Calle Ocho),237120,False,2009,59,0.825,0.743,2,-5.995,1,0.149,0.0142,2.12e-05,0.237,0.8,127.045,"hip hop, pop, latin" -Mariah Carey,Obsessed,242200,False,2009,66,0.742,0.468,10,-5.557,0,0.0625,0.0465,0.0,0.826,0.369,86.443,"pop, R&B" -Beyoncé,Sweet Dreams,208066,False,2008,48,0.694,0.825,1,-5.986,1,0.111,0.0895,1.86e-05,0.0618,0.788,121.949,"pop, R&B" -Agnes,Release 
Me,256213,False,2009,65,0.621,0.923,2,-3.124,0,0.0321,0.00645,7.28e-05,0.108,0.716,127.973,"pop, Dance/Electronic" -Black Eyed Peas,Boom Boom Pow,251440,True,2009,68,0.867,0.857,9,-5.892,1,0.0663,0.13,0.00171,0.13,0.402,130.048,"hip hop, pop" -Shakira,She Wolf,188866,False,2009,68,0.865,0.69,7,-7.448,1,0.0443,0.285,0.0162,0.225,0.867,121.983,"pop, latin" -Lady Gaga,Paparazzi,208306,False,2008,70,0.762,0.692,5,-3.973,0,0.0438,0.113,0.0,0.094,0.397,114.906,pop -JAY-Z,Empire State Of Mind,276920,True,2009,82,0.491,0.956,11,-1.538,1,0.392,0.0295,0.0,0.46,0.811,173.585,hip hop -OneRepublic,All The Right Moves,238000,False,2009,65,0.529,0.948,0,-3.527,1,0.0474,0.258,9.35e-06,0.283,0.65,146.024,pop -Jeremih,Birthday Sex,226506,False,2009,67,0.677,0.523,7,-5.603,0,0.0439,0.295,0.0,0.15,0.446,60.019,"hip hop, pop, R&B" -Cascada,Evacuate The Dancefloor,207200,False,2009,63,0.762,0.702,0,-5.87,1,0.0432,0.0167,0.0,0.314,0.898,127.029,"hip hop, pop, Dance/Electronic" -Linkin Park,New Divide,268613,False,2009,68,0.493,0.808,5,-3.365,0,0.0362,0.000235,0.0,0.0983,0.38,117.971,"rock, metal" -Kevin Rudolf,Let It Rock,231173,True,2008,66,0.607,0.783,7,-4.41,1,0.0397,0.000683,0.0,0.0678,0.434,113.172,"hip hop, pop" -JAY-Z,Run This Town,267520,True,2009,75,0.632,0.924,1,-1.802,1,0.29,0.281,0.0,0.263,0.441,86.844,hip hop -Lady Gaga,Bad Romance,294573,True,2009,80,0.696,0.921,0,-3.755,1,0.0363,0.00314,5.24e-05,0.0842,0.714,119.001,pop -T.I.,Live Your Life,338853,True,2008,75,0.375,0.862,11,-3.363,0,0.255,0.071,0.0,0.211,0.478,159.841,"hip hop, pop" -Eminem,We Made You,269613,False,2009,63,0.924,0.853,2,-1.203,1,0.0792,0.107,1.45e-06,0.129,0.67,114.003,hip hop -Dizzee Rascal,Bonkers,177573,False,2011,38,0.624,0.977,11,-3.34,0,0.22,0.00615,0.0,0.253,0.74,126.127,"hip hop, pop, Dance/Electronic" -Ciara,Love Sex Magic (feat. 
Justin Timberlake),220426,False,2009,58,0.893,0.666,10,-5.089,0,0.138,0.0206,0.0,0.342,0.874,107.011,"pop, R&B" -Rihanna,Russian Roulette,227533,True,2009,64,0.48,0.486,6,-5.754,0,0.0447,0.046,0.0,0.107,0.265,80.051,"hip hop, pop, R&B" -Soulja Boy,Turn My Swag On,206333,False,2008,59,0.546,0.712,3,-3.104,0,0.0269,0.000969,0.0,0.406,0.489,150.154,"hip hop, pop" -The xx,Intro,127920,False,2009,2,0.617,0.778,9,-8.871,0,0.027,0.459,0.925,0.128,0.152,100.363,rock -Jay Sean,Down,212106,False,2009,3,0.657,0.695,2,-4.493,1,0.0321,0.0108,0.0,0.0822,0.683,65.997,"hip hop, pop, R&B" -Keri Hilson,Turnin Me On - Original Dirty,248066,True,2008,41,0.661,0.584,1,-5.09,1,0.061,0.00293,4.82e-06,0.134,0.0949,159.873,"pop, R&B" -David Guetta,When Love Takes Over (feat. Kelly Rowland),191000,False,2010,69,0.675,0.862,11,-4.614,1,0.0253,0.0165,0.000427,0.169,0.498,129.967,"hip hop, pop, Dance/Electronic" -Cheryl,Fight For This Love,223253,False,2009,65,0.739,0.741,7,-5.873,1,0.0912,0.013,0.0,0.0679,0.727,122.988,"pop, Dance/Electronic" -Soulja Boy,Kiss Me Thru The Phone,193386,True,2008,76,0.758,0.712,7,-3.781,1,0.112,0.0185,0.0,0.0677,0.795,149.998,"hip hop, pop" -Eminem,Crack A Bottle,297520,True,2009,70,0.516,0.874,9,-2.571,1,0.186,0.0864,0.0,0.173,0.391,169.561,hip hop -P!nk,Sober,251440,False,2010,46,0.614,0.792,6,-4.907,1,0.0299,0.0653,0.0,0.195,0.407,91.066,pop -Tinchy Stryder,Number 1,212773,False,2009,59,0.581,0.824,0,-3.912,0,0.0377,0.0105,0.0,0.483,0.747,114.912,"pop, Dance/Electronic" -Pixie Lott,"Mama Do (Uh Oh, Uh Oh)",196520,False,2009,54,0.451,0.873,3,-4.711,0,0.19,0.171,0.0,0.327,0.589,119.918,"pop, Dance/Electronic" -JLS,Beat Again - Radio Edit,196640,False,2009,0,0.894,0.93,11,-4.577,1,0.0712,0.0355,0.0,0.0759,0.968,120.02,pop -3OH!3,DONTTRUSTME,192573,True,2008,70,0.791,0.713,5,-3.742,0,0.254,0.0163,0.0,0.189,0.514,130.012,"hip hop, pop, rock" -A.R. Rahman,Jai Ho! 
(You Are My Destiny),222400,False,2009,65,0.657,0.941,8,-3.919,0,0.061,0.0476,0.0,0.0797,0.879,136.202,set() -The Pussycat Dolls,I Hate This Part,218400,False,2008,64,0.756,0.612,5,-4.371,1,0.0317,0.0659,0.0,0.274,0.452,111.572,"pop, R&B" -Akon,Beautiful,312986,False,2008,65,0.74,0.945,0,-4.442,0,0.0889,0.123,0.0,0.112,0.629,130.015,pop -Alexandra Burke,Bad Boys (feat. Flo Rida),206480,False,2009,57,0.67,0.866,1,-3.684,1,0.0538,0.0115,0.0,0.358,0.636,140.029,pop -Mario,Break Up,249026,True,2009,51,0.44,0.517,0,-7.355,1,0.363,0.428,0.0,0.173,0.475,73.881,"pop, R&B" -Drake,Forever,357706,True,2009,73,0.457,0.906,5,-2.278,0,0.342,0.249,0.0,0.182,0.54,104.02,"hip hop, pop, R&B" -Selena Gomez & The Scene,Naturally,202586,False,2009,63,0.605,0.902,10,-5.406,0,0.0511,0.0185,1.01e-06,0.0534,0.875,132.612,"pop, Dance/Electronic" -Kanye West,Love Lockdown,270306,False,2008,66,0.76,0.524,1,-7.67,0,0.0323,0.0542,0.5,0.112,0.112,119.603,hip hop -The Veronicas,Untouched,255360,False,2005,58,0.557,0.783,6,-4.893,0,0.234,0.0171,0.0172,0.151,0.442,177.008,"pop, Dance/Electronic" -Fabolous,Throw It In The Bag,231573,True,2009,59,0.789,0.55,11,-7.423,0,0.0473,0.174,0.0,0.0614,0.696,172.049,"hip hop, pop, R&B" -Jason Derulo,Whatcha Say,221253,False,2010,70,0.615,0.711,11,-5.507,1,0.0779,0.0444,0.0,0.145,0.711,144.036,"hip hop, pop" -Lily Allen,Not Fair,201213,True,2009,54,0.719,0.861,5,-6.982,1,0.0402,0.0447,0.00989,0.219,0.948,121.491,pop -T.I.,Dead And Gone,299746,True,2008,68,0.713,0.746,0,-4.99,1,0.259,0.0402,0.0,0.601,0.47,135.021,"hip hop, pop" -Sean Kingston,Fire Burning,239986,False,2009,71,0.839,0.804,1,-2.513,1,0.0329,0.0192,0.0,0.331,0.888,122.973,"hip hop, pop, R&B" -Empire of the Sun,Walking On A Dream,198440,False,2008,77,0.871,0.701,5,-5.594,0,0.0458,0.257,7.52e-06,0.0589,0.716,126.975,"rock, pop, Dance/Electronic" -Cobra Starship,Good Girls Go Bad (feat. 
Leighton Meester),196413,False,2009,65,0.594,0.874,0,-3.716,1,0.0815,0.0116,0.0,0.549,0.628,119.964,"pop, Dance/Electronic" -Nickelback,If Today Was Your Last Day,249066,False,2008,66,0.485,0.911,3,-5.749,1,0.0355,8.67e-05,0.0,0.092,0.568,89.956,"rock, metal" -La Roux,Bulletproof,205733,False,2009,71,0.674,0.882,3,-2.771,0,0.0477,0.000441,6.47e-05,0.068,0.682,123.016,"pop, Dance/Electronic" -Kasabian,Fire,252279,False,2009,64,0.525,0.742,10,-5.643,1,0.0311,0.0874,0.149,0.117,0.179,117.029,rock -Calvin Harris,I'm Not Alone - Radio Edit,210973,False,2009,55,0.597,0.684,7,-6.614,1,0.0321,0.00481,0.105,0.317,0.435,130.99,"hip hop, pop, Dance/Electronic" -Justin Bieber,One Time,215866,False,2009,71,0.691,0.853,1,-2.528,0,0.0372,0.0631,7.13e-05,0.082,0.762,145.999,pop -Jamie Foxx,Blame It,289746,True,2008,65,0.673,0.614,7,-5.426,1,0.103,0.0782,0.0,0.139,0.362,176.052,"hip hop, pop, R&B" -La Roux,In For The Kill,248626,False,2009,59,0.629,0.969,8,-0.276,0,0.0455,0.00184,0.0,0.122,0.905,150.01,"pop, Dance/Electronic" -Muse,Uprising,304840,False,2009,75,0.602,0.905,2,-4.046,1,0.0775,0.000202,0.064,0.117,0.411,128.019,rock -Nickelback,Gotta Be Somebody,252653,False,2008,62,0.536,0.89,0,-5.222,1,0.0601,0.000354,0.00165,0.133,0.205,115.998,"rock, metal" -James Morrison,Broken Strings,250453,False,2008,70,0.57,0.717,1,-4.914,1,0.029,0.00967,0.0,0.0833,0.321,111.91,"pop, R&B" -T.I.,Whatever You Like,249533,True,2008,74,0.68,0.687,9,-6.162,0,0.0709,0.0161,0.0,0.261,0.467,150.053,"hip hop, pop" -Dizzee Rascal,Holiday,220626,False,2011,60,0.545,0.918,0,-1.925,1,0.0448,0.0571,0.0,0.0415,0.855,117.985,"hip hop, pop, Dance/Electronic" -Beyoncé,Single Ladies (Put a Ring on It),193213,False,2008,67,0.426,0.584,1,-5.293,1,0.296,0.0383,0.0,0.188,0.272,193.437,"pop, R&B" -Boys Like Girls,Love Drunk,226706,False,2009,64,0.44,0.976,10,-3.17,1,0.141,0.00177,3.39e-06,0.16,0.412,150.005,pop -The-Dream,Rockin' That 
Shit,221920,True,2009,62,0.672,0.62,8,-4.865,1,0.0422,0.123,0.0,0.362,0.627,78.005,"hip hop, pop, R&B" -OneRepublic,Secrets,224693,False,2009,74,0.516,0.764,2,-6.223,1,0.0366,0.0717,0.0,0.115,0.376,148.021,pop -JLS,Everybody in Love,195586,False,2013,39,0.705,0.783,5,-5.971,0,0.0327,0.0703,0.0,0.36,0.656,140.022,pop -Jason Aldean,Big Green Tractor,204200,False,2009,50,0.616,0.596,4,-5.035,1,0.0289,0.654,0.0,0.218,0.58,137.077,country -The Fray,Never Say Never,256613,False,2009,67,0.23,0.492,8,-5.767,1,0.0317,0.568,8.18e-06,0.176,0.262,160.139,pop -Lily Allen,The Fear,207120,True,2009,53,0.661,0.847,10,-6.948,1,0.0404,0.419,8.82e-05,0.107,0.522,134.002,pop -Kings of Leon,Sex on Fire,203346,False,2008,80,0.542,0.905,9,-5.653,1,0.054,0.00172,0.0104,0.136,0.374,153.398,rock -Drake,Best I Ever Had,258760,True,2010,54,0.431,0.894,5,-2.673,0,0.33,0.0951,0.0,0.188,0.605,162.161,"hip hop, pop, R&B" -Kelly Clarkson,Already Gone,281560,False,2009,56,0.209,0.872,9,-2.996,1,0.0757,0.217,0.0,0.0768,0.294,78.139,"pop, R&B" -Miley Cyrus,Party In The U.S.A.,202066,False,2009,79,0.652,0.698,10,-4.667,0,0.042,0.00112,0.000115,0.0886,0.47,96.021,pop -Jordin Sparks,Battlefield,241920,False,2009,57,0.613,0.634,2,-3.472,1,0.0339,0.0178,0.0,0.0639,0.37,144.953,"pop, R&B" -Ne-Yo,Mad,254533,False,2008,64,0.731,0.644,0,-5.348,1,0.0343,0.665,0.0,0.11,0.691,129.94,"pop, R&B" -Green Day,21 Guns,321093,False,2009,73,0.268,0.742,5,-4.939,1,0.0355,0.0518,0.0,0.626,0.416,159.779,rock -Keri Hilson,Knock You Down,326186,False,2009,57,0.588,0.877,8,-4.78,1,0.16,0.00952,0.0,0.171,0.645,155.165,"pop, R&B" -Taylor Swift,You Belong With Me,231146,False,2008,55,0.687,0.771,6,-4.424,1,0.0384,0.164,2.46e-05,0.112,0.445,129.964,pop -Shinedown,Second Chance,222066,False,2008,67,0.46,0.796,0,-4.501,0,0.0333,0.00107,0.0,0.106,0.182,100.011,"rock, metal" -Asher Roth,I Love College,241933,True,2009,62,0.713,0.826,8,-4.075,1,0.243,0.0897,0.0,0.421,0.664,86.444,"hip hop, pop" -Katy Perry,Waking Up In 
Vegas,199186,False,2008,49,0.524,0.878,5,-3.108,0,0.0346,0.0012,0.0,0.098,0.59,130.989,pop -Kelly Clarkson,My Life Would Suck Without You,211493,False,2009,68,0.526,0.882,9,-4.006,1,0.0509,0.0014,0.0,0.144,0.424,144.982,"pop, R&B" -Mumford & Sons,Little Lion Man,245173,True,2009,68,0.517,0.492,5,-8.05,1,0.0272,0.0275,3.19e-05,0.0873,0.455,138.585,"Folk/Acoustic, rock, pop" -The Fray,You Found Me,241853,False,2009,73,0.338,0.803,8,-5.412,0,0.0413,0.0187,0.0,0.136,0.4,151.994,pop -Bruno Mars,Just the Way You Are,220734,False,2010,77,0.635,0.841,5,-5.379,1,0.0422,0.0134,0.0,0.0622,0.424,109.021,pop -Eminem,Love The Way You Lie,263373,True,2010,81,0.749,0.925,10,-5.034,1,0.227,0.241,0.0,0.52,0.641,86.989,hip hop -Lady Gaga,Bad Romance,294573,True,2009,80,0.696,0.921,0,-3.755,1,0.0363,0.00314,5.24e-05,0.0842,0.714,119.001,pop -Stromae,Alors on danse - Radio Edit,206066,False,2010,77,0.791,0.59,1,-9.206,0,0.0793,0.0994,0.00203,0.065,0.714,119.951,pop -Kesha,TiK ToK,199693,False,2010,80,0.755,0.837,2,-2.718,0,0.142,0.0991,0.0,0.289,0.714,120.028,"pop, Dance/Electronic" -David Guetta,Memories (feat. 
Kid Cudi),210853,False,2010,76,0.546,0.916,8,-3.932,1,0.255,0.00144,4.34e-06,0.251,0.375,129.983,"hip hop, pop, Dance/Electronic" -Taio Cruz,Dynamite,202613,False,2010,80,0.751,0.783,4,-3.724,1,0.0859,0.00379,0.0,0.036,0.816,119.975,"hip hop, pop" -Rihanna,Only Girl (In The World),235493,False,2010,73,0.789,0.716,11,-4.241,0,0.0432,0.129,1.07e-05,0.069,0.611,125.906,"hip hop, pop, R&B" -Katy Perry,California Gurls,234653,False,2012,72,0.791,0.754,0,-3.729,1,0.0569,0.00446,0.0,0.163,0.425,125.014,pop -Kesha,Take It Off,215200,False,2010,68,0.729,0.675,5,-5.292,0,0.0286,4.14e-05,0.00126,0.0867,0.74,125.036,"pop, Dance/Electronic" -INNA,Hot - Play & Win Radio Version,217036,False,2010,42,0.817,0.939,7,-6.079,0,0.28,0.0798,0.441,0.431,0.599,128.0,pop -Far East Movement,Like A G6,216893,False,2010,73,0.435,0.837,3,-8.126,1,0.449,0.00676,0.0,0.117,0.778,124.913,"hip hop, pop" -David Guetta,Gettin' Over You (feat. Fergie & LMFAO),188000,False,2010,62,0.615,0.913,10,-5.077,0,0.0816,0.178,0.0,0.0773,0.45,129.944,"hip hop, pop, Dance/Electronic" -Duck Sauce,Barbra Streisand - Radio Edit,196533,False,2010,46,0.769,0.922,1,-1.966,1,0.108,0.000939,0.197,0.233,0.506,127.965,Dance/Electronic -Lucenzo,Danza Kuduro (feat. 
Don Omar),213986,False,2011,0,0.622,0.942,7,-6.365,1,0.112,0.00713,0.0,0.0715,0.773,130.003,latin -Rihanna,Te Amo,208426,True,2009,67,0.567,0.707,8,-5.455,0,0.0818,0.541,0.000176,0.1,0.751,171.917,"hip hop, pop, R&B" -Timbaland,Carry Out (Featuring Justin Timberlake),232466,False,2009,69,0.531,0.574,10,-6.693,0,0.113,0.114,0.0308,0.256,0.272,115.68,"hip hop, pop, R&B" -DJ Fresh,Gold Dust - Radio Edit,192446,False,2010,63,0.451,0.948,0,-0.74,1,0.147,0.255,0.0,0.392,0.295,176.985,"pop, Dance/Electronic" -Edward Maya,Stereo Love - Radio Edit,184573,False,2010,66,0.799,0.783,1,-3.896,0,0.0322,0.0346,0.0186,0.0757,0.586,127.041,pop -Taio Cruz,Break Your Heart,201546,False,2010,74,0.607,0.934,3,-4.217,1,0.0314,0.0327,0.0,0.0909,0.568,122.01,"hip hop, pop" -Black Eyed Peas,Imma Be,257560,False,2009,61,0.597,0.517,0,-6.963,1,0.365,0.179,0.0,0.307,0.412,92.035,"hip hop, pop" -Sidney Samson,Riverside,320348,True,2009,47,0.804,0.976,1,-2.458,0,0.0445,0.00023,0.894,0.125,0.237,125.997,Dance/Electronic -Eminem,Not Afraid,248133,True,2010,79,0.855,0.954,0,-1.19,0,0.264,0.529,0.0,0.205,0.668,114.635,hip hop -Lady Gaga,Alejandro,274213,False,2009,67,0.623,0.793,11,-6.63,0,0.0462,0.000397,0.0015,0.375,0.36,98.998,pop -Rihanna,Rude Boy,222920,True,2009,74,0.563,0.75,11,-4.496,1,0.127,0.113,0.0,0.0788,0.812,173.906,"hip hop, pop, R&B" -Ne-Yo,Beautiful Monster,251573,False,2010,57,0.708,0.771,5,-6.99,0,0.131,0.209,5.32e-06,0.277,0.494,128.013,"pop, R&B" -3OH!3,Starstrukk,203093,False,2010,32,0.605,0.795,11,-6.086,0,0.0712,0.00153,0.0,0.207,0.263,139.896,"hip hop, pop, rock" -Black Eyed Peas,The Time (Dirty Bit),307640,False,2010,69,0.816,0.813,6,-7.798,0,0.0664,0.0662,2.77e-06,0.602,0.436,127.979,"hip hop, pop" -Kelly Rowland,Commander,218106,False,2011,59,0.395,0.876,11,-3.859,0,0.138,0.0173,8.46e-06,0.362,0.567,124.638,"hip hop, pop, R&B" -Swedish House Mafia,One (Your Name) - Radio Edit,163246,False,2010,63,0.733,0.673,9,-6.572,0,0.0333,0.0132,0.0049,0.16,0.636,125.061,"pop, 
Dance/Electronic" -Chris Brown,Deuces (feat. Tyga & Kevin McCall),276560,True,2011,68,0.692,0.736,1,-5.109,1,0.11,0.0324,0.0,0.0787,0.217,73.987,"hip hop, pop, R&B" -Usher,DJ Got Us Fallin' In Love (feat. Pitbull),220800,False,2010,79,0.663,0.861,7,-3.398,0,0.109,0.0338,0.0,0.082,0.654,119.963,"hip hop, pop, R&B" -Iyaz,Replay,182306,False,2009,75,0.706,0.751,9,-6.323,1,0.0708,0.173,0.0,0.168,0.195,91.031,"hip hop, pop" -Flo Rida,Club Can't Handle Me (feat. David Guetta),234560,False,2010,76,0.616,0.869,0,-3.911,1,0.0327,0.0283,0.0,0.064,0.473,127.966,"hip hop, pop" -Sean Kingston,Eenie Meenie,201946,False,2010,74,0.72,0.607,1,-4.168,1,0.0322,0.0543,0.0,0.113,0.828,121.223,"hip hop, pop, R&B" -Waka Flocka Flame,No Hands (feat. Roscoe Dash & Wale),263773,True,2010,58,0.76,0.595,1,-6.366,1,0.0391,0.00544,0.0,0.241,0.361,131.497,"hip hop, pop" -Tinie Tempah,Written in the Stars (feat. Eric Turner),207653,False,2010,57,0.656,0.931,7,-4.188,1,0.0981,0.0613,0.0,0.219,0.534,91.881,"hip hop, pop, Dance/Electronic" -Bruno Mars,Talking to the Moon,217866,False,2010,76,0.498,0.59,1,-4.721,0,0.032,0.511,0.0,0.107,0.0784,145.867,pop -Miley Cyrus,Can't Be Tamed,168213,False,2010,0,0.63,0.91,11,-2.919,0,0.144,0.0287,0.0,0.196,0.743,116.98,pop -Mike Posner,Cooler Than Me - Single Mix,213293,False,2010,75,0.768,0.82,7,-4.63,0,0.0474,0.179,0.0,0.689,0.625,129.965,"hip hop, pop, Dance/Electronic" -Lady Gaga,Telephone,220640,False,2009,69,0.824,0.836,3,-5.903,1,0.0404,0.00521,0.000817,0.112,0.716,122.014,pop -Yolanda Be Cool,We No Speak Americano (JT Radio Edit),157438,False,2010,1,0.901,0.805,6,-5.005,1,0.0464,0.0712,0.0812,0.0923,0.737,124.996,Dance/Electronic -Rihanna,Hard,250600,True,2009,55,0.31,0.746,1,-3.599,1,0.111,0.0121,0.0,0.649,0.163,182.032,"hip hop, pop, R&B" -Tinie Tempah,Pass Out,268053,True,2010,53,0.693,0.891,1,-3.261,1,0.296,0.0281,0.0,0.143,0.532,91.1,"hip hop, pop, Dance/Electronic" -Adam Lambert,Whataya Want from 
Me,227320,False,2009,57,0.438,0.672,11,-4.706,0,0.0429,0.0055,0.0,0.0583,0.457,185.934,pop -Trey Songz,Bottoms Up (feat. Nicki Minaj),242013,True,2010,0,0.845,0.601,1,-5.283,1,0.161,0.0205,0.0,0.385,0.329,74.008,"hip hop, pop, R&B" -Timbaland,If We Ever Meet Again (Featuring Katy Perry),292706,False,2009,68,0.652,0.605,8,-7.371,1,0.0393,0.00481,0.0,0.0605,0.394,126.091,"hip hop, pop, R&B" -Enrique Iglesias,I Like It,231373,False,2010,63,0.648,0.942,10,-2.881,0,0.0878,0.021,0.0,0.0594,0.73,129.007,"pop, latin" -Tim Berg,Seek Bromance - Avicii’s Vocal Edit,203960,False,2010,0,0.515,0.837,11,-2.552,1,0.0401,0.0868,0.0,0.0403,0.52,126.026,"pop, Dance/Electronic" -Usher,OMG (feat. will.i.am),269493,False,2010,73,0.781,0.745,4,-5.81,0,0.0332,0.198,1.14e-05,0.36,0.326,129.998,"hip hop, pop, R&B" -Maroon 5,Misery,216200,False,2010,67,0.703,0.81,4,-4.874,0,0.0424,0.000315,0.0,0.216,0.726,102.978,pop -Jay Sean,Down,212106,False,2009,3,0.657,0.695,2,-4.493,1,0.0321,0.0108,0.0,0.0822,0.683,65.997,"hip hop, pop, R&B" -Rihanna,Man Down,267000,True,2010,68,0.47,0.904,0,-4.024,0,0.177,0.0436,0.0,0.0491,0.557,155.788,"hip hop, pop, R&B" -3OH!3,My First Kiss (feat. Ke$ha),192440,False,2010,62,0.682,0.889,0,-4.166,1,0.0804,0.00564,0.0,0.36,0.827,138.021,"hip hop, pop, rock" -Justin Bieber,Baby,214240,False,2010,81,0.728,0.859,5,-5.237,0,0.137,0.0401,0.0,0.111,0.535,65.043,pop -Cali Swag District,Teach Me How to Dougie,237480,True,2011,66,0.846,0.438,11,-4.981,1,0.141,0.2,9.43e-05,0.0939,0.512,85.013,"hip hop, pop" -Swedish House Mafia,Miami 2 Ibiza - Swedish House Mafia vs. Tinie Tempah,206460,True,2010,54,0.736,0.929,7,-5.89,0,0.0674,0.00237,1.11e-05,0.402,0.658,125.03,"pop, Dance/Electronic" -K'NAAN,Wavin' Flag,220520,False,2009,57,0.625,0.699,0,-6.416,1,0.0729,0.13,0.0,0.238,0.717,75.974,set() -Ludacris,How Low,201586,True,2010,63,0.785,0.498,1,-6.977,1,0.0533,0.00248,1.23e-06,0.224,0.418,143.96,"hip hop, pop" -Kesha,Blah Blah Blah (feat. 
3OH!3),172053,True,2010,61,0.752,0.836,10,-3.173,1,0.115,0.0843,0.000425,0.424,0.519,120.003,"pop, Dance/Electronic" -DJ Khaled,"All I Do Is Win (feat. T-Pain, Ludacris, Snoop Dogg & Rick Ross)",232506,True,2010,51,0.544,0.781,10,-3.616,1,0.189,0.014,0.0,0.161,0.277,150.1,"hip hop, pop" -Jason Derulo,In My Head,199026,False,2010,65,0.762,0.748,0,-4.15,0,0.033,0.0266,0.0,0.348,0.851,110.009,"hip hop, pop" -Aloe Blacc,I Need A Dollar,243053,False,2010,1,0.84,0.482,6,-7.116,0,0.0333,0.202,0.0,0.0873,0.957,95.498,R&B -B.o.B,Airplanes (feat. Hayley Williams of Paramore),180480,True,2010,74,0.66,0.867,6,-4.285,0,0.116,0.11,0.0,0.0368,0.377,93.033,"hip hop, pop" -Ludacris,My Chick Bad,216933,True,2010,68,0.624,0.723,1,-6.782,1,0.378,0.172,0.0,0.483,0.757,84.789,"hip hop, pop" -Nelly,Just A Dream,237800,False,2010,73,0.531,0.752,1,-6.161,1,0.0305,0.0421,0.0,0.12,0.103,89.917,"hip hop, pop, R&B" -Rihanna,What's My Name?,263173,False,2010,66,0.692,0.786,2,-2.959,1,0.069,0.23,0.0,0.0797,0.583,100.025,"hip hop, pop, R&B" -Shontelle,Impossible,226533,False,2010,35,0.599,0.624,8,-3.631,1,0.0343,0.385,0.0,0.125,0.539,90.034,"pop, R&B" -Take That,The Flood,289359,False,2010,57,0.514,0.83,11,-6.077,0,0.044,0.0378,1.43e-06,0.122,0.138,100.631,pop -Trey Songz,Say Aah (feat. 
Fabolous),207546,False,2009,54,0.724,0.87,1,-3.614,0,0.113,0.00453,0.0,0.833,0.81,93.01,"hip hop, pop, R&B" -Jason Derulo,Whatcha Say,221253,False,2010,70,0.615,0.711,11,-5.507,1,0.0779,0.0444,0.0,0.145,0.711,144.036,"hip hop, pop" -Drake,Over,233560,True,2010,57,0.325,0.848,7,-5.611,1,0.279,0.0109,0.0,0.124,0.433,100.093,"hip hop, pop, R&B" -Example,Kickstarts,181826,False,2010,63,0.61,0.836,5,-4.455,1,0.0573,0.00374,0.0,0.358,0.657,126.056,"pop, Dance/Electronic" -Plan B,She Said,208853,False,2010,62,0.72,0.538,3,-5.85,0,0.141,0.301,3.03e-06,0.144,0.815,147.007,Dance/Electronic -The Saturdays,Higher,207613,False,2010,61,0.639,0.862,10,-3.292,1,0.0341,0.049,6.81e-06,0.264,0.596,117.011,"pop, Dance/Electronic" -Jay Sean,Do You Remember,210306,False,2009,2,0.855,0.668,11,-4.892,1,0.0644,0.0242,0.0,0.102,0.803,125.846,"hip hop, pop, R&B" -Usher,Hey Daddy (Daddy's Home),224093,False,2010,60,0.59,0.698,11,-4.262,1,0.0286,0.000176,0.0,0.107,0.352,95.975,"hip hop, pop, R&B" -Lil Wayne,Right Above It,271946,True,2010,68,0.376,0.841,3,-4.348,0,0.356,0.0435,0.0,0.578,0.463,76.052,"hip hop, pop" -The Wanted,All Time Low,205200,False,2010,18,0.689,0.615,8,-5.022,0,0.0563,0.352,0.0,0.0789,0.783,134.036,"pop, Dance/Electronic" -Kesha,Your Love Is My Drug,187133,False,2010,69,0.826,0.612,1,-3.891,1,0.0982,0.00681,0.0,0.0889,0.756,120.057,"pop, Dance/Electronic" -Ellie Goulding,Starry Eyed,176613,False,2010,54,0.504,0.814,5,-5.346,0,0.0389,0.135,1.42e-05,0.324,0.596,149.967,"pop, rock, Dance/Electronic" -Drake,Find Your Love,208946,False,2010,56,0.625,0.613,6,-6.005,0,0.173,0.0209,0.0,0.0286,0.738,96.033,"hip hop, pop, R&B" -Jason Derulo,Ridin' Solo,215746,False,2010,66,0.442,0.83,9,-4.02,1,0.146,0.128,0.0,0.129,0.578,89.338,"hip hop, pop" -Two Door Cinema Club,What You Know,189693,False,2010,0,0.55,0.753,6,-4.003,0,0.0407,0.000665,7.74e-06,0.0921,0.841,139.048,"World/Traditional, rock, pop" -JAY-Z,Young 
Forever,253906,True,2009,65,0.637,0.69,9,-3.214,0,0.0693,0.417,0.0,0.211,0.103,140.329,hip hop -Young Money,BedRock,288133,True,2009,71,0.733,0.664,8,-6.163,1,0.295,0.102,0.0,0.191,0.557,148.005,"hip hop, pop" -B.o.B,Nothin' on You (feat. Bruno Mars),268320,False,2010,74,0.688,0.853,10,-5.814,1,0.0493,0.386,0.0,0.0862,0.743,103.993,"hip hop, pop" -Katy Perry,Teenage Dream,227741,False,2010,69,0.719,0.798,10,-4.582,1,0.0361,0.0162,2.34e-06,0.134,0.591,120.011,pop -Scouting For Girls,This Ain't a Love Song,210680,False,2017,52,0.458,0.905,0,-4.157,1,0.0451,0.000431,0.0,0.378,0.553,176.667,"pop, rock" -Travie McCoy,Billionaire (feat. Bruno Mars),211160,True,2010,72,0.633,0.673,6,-6.403,0,0.258,0.297,0.0,0.206,0.659,86.776,"hip hop, pop" -Owl City,Fireflies,228346,False,2009,78,0.512,0.662,3,-6.797,1,0.0439,0.0275,0.0,0.118,0.472,180.114,"rock, pop" -Alicia Keys,Empire State of Mind (Part II) Broken Down,216480,False,2009,71,0.484,0.368,6,-7.784,1,0.0341,0.74,3.82e-05,0.118,0.142,92.923,"pop, R&B" -Lady A,Need You Now,236440,False,2010,73,0.581,0.717,4,-4.433,1,0.0318,0.0298,0.000186,0.243,0.316,107.884,"pop, country" -Taylor Swift,Back To December,293026,False,2010,65,0.529,0.67,2,-4.663,1,0.0303,0.117,0.0,0.334,0.286,141.893,pop -Neon Trees,Animal,212306,False,2010,64,0.482,0.829,5,-5.576,1,0.0437,0.00034,0.0,0.378,0.739,147.99,"rock, pop" -Kris Allen,Live Like We're Dying,212506,False,2009,51,0.589,0.893,0,-2.948,1,0.0397,0.0273,0.0,0.343,0.94,92.011,pop -Matt Cardle,When We Collide,226000,False,2011,46,0.443,0.683,2,-5.521,1,0.0343,0.0198,5.26e-06,0.313,0.447,81.986,pop -Robyn,Dancing On My Own,288670,False,2010,0,0.687,0.865,6,-4.663,1,0.0349,0.0743,0.225,0.0966,0.261,117.015,"pop, Dance/Electronic" -The Band Perry,If I Die Young,222773,False,2010,64,0.606,0.497,4,-6.611,1,0.0277,0.348,0.0,0.275,0.362,130.739,country -B.o.B,Magic (feat. 
Rivers Cuomo),196133,False,2010,65,0.549,0.932,8,-4.11,0,0.343,0.0127,0.0,0.347,0.787,82.439,"hip hop, pop" -Adele,Rolling in the Deep,228093,True,2011,2,0.73,0.77,8,-5.114,1,0.0298,0.138,0.0,0.0473,0.507,104.948,"pop, R&B" -Maroon 5,"Moves Like Jagger - Studio Recording From ""The Voice"" Performance",201493,False,2010,77,0.722,0.761,11,-4.459,0,0.0475,0.0117,0.0,0.315,0.624,128.044,pop -LMFAO,Party Rock Anthem,262146,False,2011,73,0.751,0.736,5,-4.168,0,0.156,0.0206,0.0,0.265,0.352,130.014,"hip hop, pop, Dance/Electronic" -Katy Perry,Firework,227893,False,2010,72,0.638,0.832,8,-5.039,1,0.049,0.141,0.0,0.113,0.648,124.071,pop -Bruno Mars,Grenade,222091,False,2010,78,0.704,0.558,2,-7.273,0,0.0542,0.148,0.0,0.107,0.245,110.444,pop -Pitbull,Hey Baby (Drop It to the Floor) (feat. T-Pain),234453,False,2011,71,0.595,0.912,10,-3.428,0,0.0884,0.0434,0.0,0.259,0.762,128.024,"hip hop, pop, latin" -Alexandra Stan,Mr. Saxobeat,195105,False,2011,0,0.726,0.931,11,-4.152,0,0.0468,0.0218,0.000283,0.143,0.797,126.976,"pop, Dance/Electronic" -David Guetta,Who's That Chick? (feat. 
Rihanna),201040,False,2010,71,0.675,0.602,11,-4.733,0,0.116,0.00377,0.0,0.0458,0.933,127.938,"hip hop, pop, Dance/Electronic" -Jennifer Lopez,On The Floor,284866,False,2011,79,0.73,0.777,3,-5.194,0,0.0496,0.105,0.000478,0.0691,0.575,130.0,"hip hop, pop, R&B" -Rihanna,S&M,243533,False,2010,73,0.767,0.682,1,-5.02,1,0.042,0.0113,0.00016,0.104,0.833,127.975,"hip hop, pop, R&B" -Kesha,Blow,219973,False,2010,69,0.753,0.729,11,-3.862,0,0.0392,0.00334,5.66e-05,0.073,0.812,120.013,"pop, Dance/Electronic" -Snoop Dogg,Sweat - Remix,195986,False,2011,68,0.813,0.732,7,-5.636,1,0.03,0.0597,0.00137,0.0826,0.731,130.02,"hip hop, pop" -Taio Cruz,Higher,187626,False,2010,61,0.672,0.907,8,-5.069,0,0.0721,0.0048,3.4e-05,0.156,0.746,128.027,"hip hop, pop" -Usher,More - RedOne Jimmy Joker Remix,219986,False,2010,67,0.551,0.893,7,-2.628,1,0.0543,0.00166,0.0,0.348,0.794,125.083,"hip hop, pop, R&B" -Example,Changed the Way You Kiss Me - Radio Edit,195466,False,2011,42,0.578,0.857,4,-3.78,0,0.041,0.00548,0.00162,0.0948,0.188,126.979,"pop, Dance/Electronic" -Black Eyed Peas,Just Can’t Get Enough,219426,False,2010,76,0.659,0.628,0,-8.685,0,0.179,0.186,0.0,0.105,0.262,94.05,"hip hop, pop" -David Guetta,Where Them Girls At (feat. Nicki Minaj & Flo Rida),194840,True,2012,72,0.666,0.876,3,-3.078,1,0.0414,0.055,0.0,0.259,0.552,129.884,"hip hop, pop, Dance/Electronic" -Lykke Li,I Follow Rivers - The Magician Remix,281106,False,2011,51,0.786,0.709,9,-5.737,0,0.0395,0.0192,0.00118,0.0845,0.285,122.019,"pop, Dance/Electronic" -Pitbull,"Give Me Everything (feat. 
Ne-Yo, Afrojack & Nayer)",252306,False,2011,81,0.671,0.939,8,-3.206,1,0.161,0.191,0.0,0.298,0.53,129.024,"hip hop, pop, latin" -Selena Gomez & The Scene,Love You Like A Love Song,188453,False,2011,79,0.858,0.678,1,-3.87,0,0.0469,0.0761,0.0,0.0741,0.922,117.009,"pop, Dance/Electronic" -Wiz Khalifa,Black and Yellow,217666,True,2011,75,0.684,0.834,2,-4.524,0,0.0675,0.0646,0.0,0.271,0.538,164.02,"hip hop, pop" -Katy Perry,E.T.,229573,False,2012,65,0.62,0.869,1,-5.252,1,0.175,0.0181,0.0,0.369,0.76,151.684,pop -Britney Spears,I Wanna Go,210266,False,2011,66,0.696,0.546,5,-6.55,1,0.0414,0.00379,3.8e-06,0.332,0.787,130.002,pop -Gym Class Heroes,Stereo Hearts (feat. Adam Levine),210960,False,2011,81,0.646,0.795,9,-3.293,1,0.0976,0.0319,0.0,0.267,0.796,89.99,"hip hop, pop" -Edward Maya,Stereo Love - Radio Edit,184573,False,2010,66,0.799,0.783,1,-3.896,0,0.0322,0.0346,0.0186,0.0757,0.586,127.041,pop -Enrique Iglesias,Tonight (I'm Fuckin' You),232213,True,2010,63,0.648,0.89,0,-3.982,0,0.0523,0.0294,3.58e-06,0.116,0.321,125.953,"pop, latin" -Jeremih,Down On Me,228453,False,2010,72,0.7,0.598,2,-7.783,1,0.114,0.0369,0.0,0.111,0.594,160.041,"hip hop, pop, R&B" -Sak Noel,Loca People - Radio Edit,215624,True,2011,49,0.926,0.808,11,-3.148,0,0.0599,0.000915,0.00327,0.0515,0.701,127.998,Dance/Electronic -Olly Murs,Heart Skips a Beat (feat. Rizzle Kicks),202266,False,2011,61,0.843,0.881,9,-3.951,1,0.0581,0.14,0.0,0.0765,0.876,110.621,"pop, Dance/Electronic" -LMFAO,Sexy And I Know It,199480,False,2011,67,0.707,0.861,7,-4.225,1,0.316,0.1,0.0,0.191,0.795,130.021,"hip hop, pop, Dance/Electronic" -Cobra Starship,You Make Me Feel... (feat. Sabi),215693,False,2011,70,0.668,0.857,7,-2.944,0,0.0535,0.0191,6.71e-06,0.0385,0.748,131.959,"pop, Dance/Electronic" -Jessie J,Do It Like A Dude,195240,True,2011,57,0.663,0.843,2,-3.672,1,0.049,0.0491,0.0,0.364,0.375,140.036,"hip hop, pop" -DJ Fresh,Louder (feat. 
Sian Evans) - Radio Edit,206776,False,2012,55,0.31,0.926,6,-1.131,0,0.0464,0.0184,0.00792,0.483,0.493,139.85,"pop, Dance/Electronic" -Waka Flocka Flame,No Hands (feat. Roscoe Dash & Wale),263773,True,2010,76,0.76,0.595,1,-6.366,1,0.0391,0.00544,0.0,0.241,0.361,131.497,"hip hop, pop" -Nickelback,When We Stand Together,190786,False,2011,65,0.446,0.9,10,-3.541,0,0.0489,0.000102,1.89e-05,0.0404,0.88,187.961,"rock, metal" -M83,Midnight City,243960,False,2011,76,0.507,0.729,11,-5.399,0,0.0393,0.0182,1.4e-06,0.0658,0.272,105.013,"rock, pop, metal, Dance/Electronic" -Britney Spears,Till the World Ends,237946,False,2011,65,0.693,0.705,8,-5.747,1,0.0665,0.0228,0.0,0.202,0.45,131.951,pop -Labrinth,Earthquake (feat. Tinie Tempah),274600,True,2012,62,0.54,0.856,0,-3.966,0,0.1,0.109,0.0,0.276,0.258,153.071,pop -Jason Derulo,Don't Wanna Go Home,206080,False,2011,63,0.671,0.808,2,-4.861,0,0.0652,0.02,0.0,0.134,0.637,121.956,"hip hop, pop" -Coldplay,Paradise,278719,False,2011,82,0.449,0.585,5,-6.761,1,0.0268,0.0509,8.75e-05,0.0833,0.212,139.631,"rock, pop" -Katy Perry,Last Friday Night (T.G.I.F.),230746,False,2012,74,0.649,0.815,3,-3.796,0,0.0415,0.00125,4.31e-05,0.671,0.765,126.03,pop -Adele,Set Fire to the Rain,242973,False,2011,2,0.603,0.67,2,-3.882,0,0.0249,0.00408,1.66e-06,0.112,0.445,107.995,"pop, R&B" -Dr. Dre,I Need A Doctor,283733,True,2011,71,0.594,0.946,3,-4.521,1,0.452,0.0869,0.0,0.306,0.397,155.826,hip hop -Lil Wayne,6 Foot 7 Foot,248586,True,2011,1,0.364,0.752,2,-5.429,1,0.304,0.0007,0.0,0.318,0.606,79.119,"hip hop, pop" -DJ Khaled,I'm On One,296146,True,2011,70,0.413,0.807,11,-3.499,0,0.318,0.0536,0.0,0.631,0.438,149.33,"hip hop, pop" -Chris Brown,Look At Me Now (feat. 
Lil' Wayne & Busta Rhymes),222586,True,2011,71,0.767,0.677,11,-6.128,0,0.184,0.0339,5.51e-06,0.144,0.538,146.155,"hip hop, pop, R&B" -Far East Movement,Rocketeer,211253,False,2010,61,0.664,0.845,4,-6.115,0,0.0461,0.181,0.0,0.267,0.357,96.005,"hip hop, pop" -The Wanted,Glad You Came,197935,False,2011,77,0.722,0.851,7,-3.873,0,0.0639,0.0319,0.0,0.108,0.452,126.885,"pop, Dance/Electronic" -Kesha,We R Who We R,204760,False,2010,71,0.736,0.817,8,-4.9,1,0.0407,0.00987,0.00167,0.117,0.653,119.95,"pop, Dance/Electronic" -Kelly Rowland,Motivation,230560,False,2011,65,0.744,0.672,9,-5.589,0,0.0418,0.187,0.000123,0.105,0.266,140.889,"hip hop, pop, R&B" -Rihanna,We Found Love,215226,False,2011,75,0.735,0.766,1,-4.485,1,0.0383,0.025,0.00138,0.108,0.6,127.985,"hip hop, pop, R&B" -Nicole Scherzinger,Don't Hold Your Breath,197440,False,2011,62,0.66,0.797,5,-6.096,0,0.0262,0.00646,0.000108,0.187,0.674,110.955,"pop, Dance/Electronic" -Mann,Buzzin Remix,224333,True,2011,48,0.687,0.939,10,-4.372,0,0.146,0.0887,0.0,0.283,0.539,104.029,"hip hop, pop" -Chris Brown,Yeah 3x,241666,True,2011,69,0.705,0.882,11,-3.201,0,0.0445,0.000369,1.16e-06,0.0934,0.7,130.0,"hip hop, pop, R&B" -Swedish House Mafia,Save The World,213337,False,2011,65,0.507,0.665,0,-7.598,1,0.0474,0.0144,0.0,0.0759,0.4,126.879,"pop, Dance/Electronic" -Kanye West,All Of The Lights,299613,True,2010,76,0.531,0.803,1,-3.284,1,0.0717,0.0796,1.71e-05,0.176,0.221,142.113,hip hop -Miguel,Sure Thing,195373,False,2010,78,0.684,0.607,11,-8.127,0,0.1,0.0267,0.000307,0.191,0.498,81.001,"pop, R&B" -Diddy - Dirty Money,Coming Home,238693,False,2010,69,0.392,0.839,7,-1.921,1,0.193,0.158,0.0,0.3,0.232,168.001,"hip hop, pop, R&B" -Rihanna,What's My Name?,263173,False,2010,66,0.692,0.786,2,-2.959,1,0.069,0.23,0.0,0.0797,0.583,100.025,"hip hop, pop, R&B" -Chris Brown,Beautiful People,225881,False,2019,53,0.415,0.775,5,-6.366,0,0.161,0.0658,0.00431,0.0843,0.536,127.898,"hip hop, pop, R&B" -Avril Lavigne,What the 
Hell,220706,False,2011,74,0.578,0.926,6,-3.689,0,0.0548,0.00472,0.0127,0.14,0.877,149.976,pop -Professor Green,Read All About It,235735,True,2011,39,0.656,0.752,11,-5.522,0,0.0434,0.336,0.0,0.223,0.445,100.963,Dance/Electronic -Chase & Status,Blind Faith,233666,False,2011,60,0.45,0.846,9,-4.712,0,0.0472,0.00523,0.0,0.228,0.402,140.042,Dance/Electronic -Chip,Champion (feat. Chris Brown),237293,True,2011,48,0.415,0.934,6,-2.914,0,0.24,0.078,0.0,0.145,0.52,190.151,"hip hop, Dance/Electronic" -Selena Gomez & The Scene,Who Says,195613,False,2011,76,0.682,0.927,4,-2.915,1,0.0479,0.0843,0.0,0.149,0.744,101.019,"pop, Dance/Electronic" -Rizzle Kicks,Down With The Trumpets,186851,False,2011,59,0.753,0.88,4,-4.689,0,0.0806,0.087,0.0,0.24,0.794,115.057,hip hop -Lady Gaga,Born This Way,260253,False,2011,72,0.587,0.828,11,-5.108,1,0.161,0.00327,0.0,0.331,0.494,123.907,pop -Nicki Minaj,Super Bass,200013,True,2010,0,0.72,0.861,11,-4.339,1,0.209,0.269,5.11e-06,0.601,0.669,126.991,"hip hop, pop" -Mac Miller,Donald Trump,165908,True,2011,61,0.636,0.901,1,-7.094,0,0.118,0.119,0.0,0.391,0.836,162.994,hip hop -Calvin Harris,Bounce (feat. Kelis) - Radio Edit,222186,False,2012,62,0.779,0.963,2,-2.125,0,0.0399,0.0334,0.493,0.664,0.759,127.941,"hip hop, pop, Dance/Electronic" -David Guetta,Without You (feat. 
Usher),208133,False,2012,70,0.608,0.614,2,-3.727,1,0.0285,0.227,4.06e-06,0.157,0.402,127.884,"hip hop, pop, Dance/Electronic" -Foster The People,Pumped Up Kicks,239600,False,2011,82,0.733,0.71,5,-5.849,0,0.0292,0.145,0.115,0.0956,0.965,127.975,"rock, pop" -Drake,Headlines,235986,True,2011,74,0.636,0.566,6,-7.16,0,0.106,0.365,0.000353,0.0917,0.425,151.894,"hip hop, pop, R&B" -Jessie J,Price Tag,223053,False,2011,73,0.636,0.831,5,-3.945,1,0.182,0.0294,3.85e-06,0.272,0.668,175.015,"hip hop, pop" -Jason Aldean,Dirt Road Anthem,229413,False,2010,53,0.678,0.739,2,-5.068,1,0.0352,0.32,2.36e-06,0.116,0.658,127.037,country -Wiz Khalifa,Roll Up,227773,True,2011,63,0.523,0.805,3,-5.473,1,0.192,0.0524,0.0,0.0914,0.602,125.358,"hip hop, pop" -OneRepublic,Good Life,253306,True,2009,72,0.634,0.69,6,-7.804,1,0.052,0.0771,0.0,0.132,0.645,94.988,pop -Bad Meets Evil,Lighters,303813,True,2011,68,0.676,0.695,0,-8.327,1,0.245,0.352,0.0,0.119,0.144,90.268,"hip hop, pop" -Bruno Mars,The Lazy Song,189109,False,2010,75,0.794,0.711,8,-5.124,0,0.0699,0.3,0.0,0.0955,0.955,174.915,pop -Lupe Fiasco,The Show Goes On,239613,True,2011,70,0.591,0.889,7,-3.839,1,0.115,0.0189,0.0,0.155,0.65,143.067,"hip hop, pop" -James Morrison,I Won't Let You Go,229303,False,2011,64,0.537,0.611,0,-6.427,1,0.0304,0.229,0.0,0.146,0.161,105.955,"pop, R&B" -Jason Derulo,It Girl,192200,False,2011,70,0.668,0.718,1,-4.736,0,0.0605,0.0165,0.0,0.104,0.345,91.993,"hip hop, pop" -Adele,Someone Like You,285240,False,2011,4,0.554,0.321,9,-8.251,1,0.028,0.893,0.0,0.0996,0.288,135.047,"pop, R&B" -Red Hot Chili Peppers,The Adventures of Rain Dance Maggie,282400,False,2011,65,0.683,0.734,4,-4.523,0,0.029,0.00112,0.0139,0.258,0.576,106.254,rock -Keri Hilson,Pretty Girl Rock,243920,False,2010,67,0.666,0.87,11,-5.004,0,0.246,0.203,0.0,0.0851,0.9,160.014,"pop, R&B" -P!nk,F**kin' Perfect,213413,True,2010,60,0.563,0.671,7,-4.788,1,0.0373,0.0422,0.0,0.36,0.45,91.964,pop -Blake Shelton,Honey 
Bee,210720,False,2011,50,0.481,0.849,4,-5.131,1,0.0385,0.00167,1.49e-06,0.121,0.723,205.57,country -Lady Gaga,The Edge Of Glory,320546,False,2011,67,0.583,0.768,9,-6.477,1,0.041,0.000323,0.0162,0.109,0.357,127.952,pop -Lady A,Just A Kiss,218840,False,2011,63,0.593,0.639,1,-5.826,1,0.0307,0.446,0.0,0.0998,0.332,142.881,"pop, country" -Hot Chelle Rae,Tonight Tonight,200466,False,2011,73,0.686,0.783,4,-4.977,1,0.119,0.0764,0.0,0.163,0.814,99.978,"hip hop, pop, rock" -Grouplove,Tongue Tied,218013,False,2011,79,0.56,0.936,3,-5.835,1,0.0439,0.00847,0.0,0.161,0.371,112.96,"pop, rock, Folk/Acoustic" -Coldplay,Every Teardrop Is a Waterfall,240796,False,2011,69,0.425,0.732,9,-6.883,1,0.0396,0.00194,0.0103,0.171,0.333,117.98,"rock, pop" -Beyoncé,Best Thing I Never Had,253746,False,2011,69,0.545,0.649,6,-4.062,1,0.0324,0.143,1.57e-05,0.0894,0.297,99.099,"pop, R&B" -P!nk,Raise Your Glass,202960,True,2010,76,0.7,0.709,7,-5.006,1,0.0838,0.0048,0.0,0.029,0.624,122.019,pop -Christina Perri,Jar of Hearts,246587,False,2011,72,0.349,0.348,3,-6.142,1,0.0316,0.726,0.0,0.12,0.0886,74.541,pop -Bruno Mars,Marry You,230192,False,2010,75,0.621,0.82,10,-4.865,1,0.0367,0.332,0.0,0.104,0.452,144.905,pop -The Band Perry,If I Die Young,222773,False,2010,64,0.606,0.497,4,-6.611,1,0.0277,0.348,0.0,0.275,0.362,130.739,country -Lil Wayne,How To Love,240306,False,2011,0,0.644,0.661,11,-6.093,1,0.0418,0.000177,5.23e-06,0.108,0.272,153.992,"hip hop, pop" -Charlene Soraia,Wherever You Will Go,197577,False,2011,60,0.597,0.115,9,-9.217,1,0.0334,0.82,0.000215,0.111,0.128,111.202,pop -Christina Perri,A Thousand Years,285120,False,2011,70,0.421,0.407,10,-7.445,1,0.0267,0.309,0.000961,0.11,0.161,139.028,pop -Rihanna,We Found Love,215226,False,2011,75,0.735,0.766,1,-4.485,1,0.0383,0.025,0.00138,0.108,0.6,127.985,"hip hop, pop, R&B" -Carly Rae Jepsen,Call Me Maybe,193400,False,2012,78,0.783,0.58,7,-6.548,1,0.0408,0.0114,2.28e-06,0.108,0.66,120.021,"pop, Dance/Electronic" -fun.,We Are Young (feat. 
Janelle Monáe),250626,False,2012,76,0.378,0.638,10,-5.576,1,0.075,0.02,7.66e-05,0.0849,0.735,184.086,"rock, pop" -Sean Paul,She Doesn't Mind,227786,False,2012,70,0.718,0.776,7,-5.208,0,0.0693,0.000624,5.3e-06,0.207,0.622,120.015,"hip hop, pop" -Pitbull,International Love (feat. Chris Brown),227280,False,2011,74,0.67,0.855,0,-3.035,0,0.0499,0.0124,0.0,0.335,0.648,120.05,"hip hop, pop, latin" -DEV,In The Dark,226226,False,2012,52,0.736,0.824,8,-4.231,1,0.0672,0.00358,0.000921,0.329,0.443,124.954,"pop, Dance/Electronic" -Adele,Skyfall,286480,False,2012,74,0.346,0.552,0,-6.864,0,0.0282,0.417,0.0,0.114,0.0789,75.881,"pop, R&B" -David Guetta,Turn Me On (feat. Nicki Minaj),199680,False,2012,65,0.704,0.793,8,-2.266,1,0.0591,0.0488,0.0,0.575,0.412,127.96,"hip hop, pop, Dance/Electronic" -Rihanna,Where Have You Been,242680,False,2011,71,0.719,0.847,0,-6.34,0,0.0916,0.00201,0.0204,0.223,0.444,127.963,"hip hop, pop, R&B" -JAY-Z,Ni**as In Paris,219333,True,2011,82,0.789,0.858,1,-5.542,1,0.311,0.127,0.0,0.349,0.775,140.022,hip hop -Maroon 5,One More Night,219546,False,2012,74,0.716,0.821,5,-3.435,0,0.0314,0.0558,0.0,0.0844,0.618,92.997,pop -Skrillex,Bangarang (feat. Sirah),215253,True,2011,70,0.716,0.972,7,-2.302,1,0.196,0.0145,3.22e-05,0.317,0.576,110.026,"hip hop, pop, Dance/Electronic" -The Wanted,Chasing The Sun,198800,False,2012,69,0.637,0.732,7,-6.209,0,0.0965,0.244,0.0,0.498,0.68,128.108,"pop, Dance/Electronic" -Flo Rida,Whistle,224653,False,2012,77,0.747,0.937,0,-5.746,1,0.0453,0.0208,0.0,0.29,0.739,103.976,"hip hop, pop" -will.i.am,This Is Love,279026,False,2013,62,0.535,0.796,0,-3.446,0,0.0445,0.00473,0.0,0.145,0.282,128.954,"hip hop, pop" -David Guetta,She Wolf (Falling to Pieces) [feat. 
Sia],222500,False,2012,66,0.492,0.857,7,-2.634,1,0.0655,0.0841,7.82e-06,0.344,0.393,129.973,"hip hop, pop, Dance/Electronic" -Nicki Minaj,Pound The Alarm,205640,True,2012,64,0.728,0.858,9,-3.686,1,0.0609,0.0403,4.09e-06,0.0241,0.591,125.055,"hip hop, pop" -Kendrick Lamar,Swimming Pools (Drank) - Extended Version,313786,True,2012,63,0.716,0.485,1,-7.745,1,0.404,0.123,2.69e-05,0.604,0.26,74.132,hip hop -MARINA,Primadonna,221075,False,2012,78,0.66,0.689,4,-2.671,0,0.0337,0.0884,0.0,0.0922,0.427,127.98,"pop, Dance/Electronic" -Kanye West,Mercy,329320,True,2012,73,0.563,0.496,6,-9.381,0,0.406,0.0685,5.8e-05,0.173,0.426,139.993,hip hop -DJ Fresh,Hot Right Now (feat. RITA ORA) - Radio Edit,182333,False,2012,58,0.524,0.972,4,-1.569,0,0.0431,0.00656,0.00058,0.224,0.476,175.017,"pop, Dance/Electronic" -LMFAO,Sexy And I Know It,199480,False,2011,67,0.707,0.861,7,-4.225,1,0.316,0.1,0.0,0.191,0.795,130.021,"hip hop, pop, Dance/Electronic" -PSY,Gangnam Style (강남스타일),219493,False,2012,72,0.727,0.937,11,-2.871,0,0.286,0.00417,0.0,0.091,0.749,132.067,pop -Kesha,Die Young,211920,False,2012,78,0.711,0.7,1,-4.805,0,0.046,0.00498,0.000125,0.215,0.801,128.001,"pop, Dance/Electronic" -Usher,Scream,234693,False,2012,66,0.616,0.862,7,-5.18,0,0.0973,0.00117,0.0,0.179,0.569,127.992,"hip hop, pop, R&B" -Olly Murs,Troublemaker (feat. 
Flo Rida),185586,False,2012,0,0.762,0.863,0,-3.689,0,0.0565,0.015,0.0,0.125,0.965,106.008,"pop, Dance/Electronic" -Linkin Park,BURN IT DOWN,230253,False,2012,73,0.585,0.972,9,-4.45,0,0.0534,0.0143,0.0,0.0707,0.585,110.006,"rock, metal" -Bruno Mars,Locked out of Heaven,233478,False,2012,85,0.726,0.698,5,-4.165,1,0.0431,0.049,0.0,0.309,0.867,143.994,pop -Avicii,Levels - Radio Edit,199906,False,2011,77,0.584,0.889,1,-5.941,0,0.0343,0.0462,0.828,0.309,0.464,126.04,"pop, Dance/Electronic" -Flo Rida,Good Feeling,248133,False,2012,76,0.706,0.89,1,-4.444,0,0.0688,0.0588,0.00286,0.306,0.684,128.011,"hip hop, pop" -Rihanna,Diamonds,225146,False,2012,75,0.564,0.71,11,-4.92,0,0.0461,0.00125,0.0,0.109,0.393,91.972,"hip hop, pop, R&B" -Wiz Khalifa,"Work Hard, Play Hard",219026,True,2012,62,0.787,0.666,11,-4.475,1,0.0485,0.0409,1.91e-05,0.296,0.485,140.008,"hip hop, pop" -Katy Perry,Part Of Me,216160,False,2012,73,0.678,0.918,5,-4.63,1,0.0355,0.000417,0.0,0.0744,0.769,130.028,pop -Nicki Minaj,Starships,210626,True,2012,75,0.747,0.716,11,-2.457,0,0.075,0.135,0.0,0.251,0.751,125.008,"hip hop, pop" -Kendrick Lamar,m.A.A.d city,350120,True,2012,67,0.487,0.729,2,-6.815,1,0.271,0.0538,4.07e-06,0.44,0.217,91.048,hip hop -Asaf Avidan & the Mojos,One Day / Reckoning Song (Wankelmut Remix) [Radio Edit],212360,False,2012,0,0.821,0.676,3,-6.366,0,0.0547,0.187,0.0001,0.0927,0.559,119.012,pop -Florence + The Machine,Spectrum (Say My Name) - Calvin Harris Remix,218190,False,2011,68,0.578,0.946,11,-3.85,0,0.0482,0.00225,0.00412,0.0966,0.588,126.092,"rock, pop, Dance/Electronic" -Azealia Banks,212,204956,True,2014,0,0.847,0.769,11,-5.761,0,0.258,0.0145,0.00012,0.0767,0.626,126.017,"hip hop, pop, R&B" -Maroon 5,Payphone,231173,True,2012,82,0.743,0.752,4,-4.813,1,0.0414,0.0188,0.0,0.287,0.545,110.015,pop -James Arthur,Impossible,209440,False,2013,75,0.376,0.695,0,-4.782,0,0.0933,0.135,0.0,0.11,0.302,169.533,pop -Michel Teló,Ai Se Eu Te Pego - 
Live,166866,False,2012,0,0.676,0.935,11,-4.55,1,0.0692,0.357,0.0,0.853,0.85,96.055,"country, latin" -Otto Knows,Million Voices - Radio Edit,192866,False,2012,0,0.582,0.894,8,-6.298,1,0.041,0.0022,0.0223,0.0664,0.0694,125.946,"pop, Dance/Electronic" -Icona Pop,I Love It,156773,True,2012,51,0.71,0.901,1,-2.686,1,0.0296,0.00828,1.34e-05,0.172,0.86,125.953,"pop, Dance/Electronic" -Tyga,Rack City,203200,True,2012,61,0.929,0.339,1,-10.881,1,0.371,0.0373,1.96e-05,0.187,0.273,98.986,"hip hop, pop" -Calvin Harris,Feel So Close - Radio Edit,206413,False,2012,79,0.707,0.924,7,-2.842,1,0.031,0.000972,0.00703,0.204,0.919,127.937,"hip hop, pop, Dance/Electronic" -Jennifer Lopez,Dance Again,237266,False,2012,0,0.797,0.867,11,-5.242,0,0.0696,0.0158,2.79e-06,0.0868,0.716,127.974,"hip hop, pop, R&B" -Grimes,Oblivion,251266,False,2012,0,0.697,0.529,2,-8.838,1,0.0338,0.115,0.901,0.0895,0.486,155.974,"pop, rock, Dance/Electronic" -Adele,Rumour Has It,223266,False,2011,0,0.612,0.748,0,-5.014,1,0.0445,0.617,0.0,0.167,0.574,120.052,"pop, R&B" -Coldplay,Paradise,278719,False,2011,82,0.449,0.585,5,-6.761,1,0.0268,0.0509,8.75e-05,0.0833,0.212,139.631,"rock, pop" -Chris Brown,Turn Up the Music,227973,False,2012,64,0.594,0.841,1,-5.792,1,0.102,0.000238,2.22e-06,0.156,0.643,129.925,"hip hop, pop, R&B" -Frank Ocean,Lost,234093,True,2012,64,0.913,0.603,8,-4.892,1,0.226,0.0272,0.000503,0.167,0.497,123.061,"hip hop, pop, R&B" -Calvin Harris,Let's Go (feat. Ne-Yo),232800,False,2012,66,0.71,0.882,4,-2.932,0,0.0595,0.00777,0.00771,0.294,0.875,128.016,"hip hop, pop, Dance/Electronic" -The Script,Hall of Fame (feat. 
will.i.am),202533,False,2012,78,0.421,0.873,10,-4.343,1,0.0564,0.0654,0.0,0.123,0.629,84.786,"pop, rock" -Swedish House Mafia,Don't You Worry Child - Radio Edit,212862,False,2012,76,0.612,0.84,11,-3.145,0,0.0509,0.112,0.0,0.116,0.438,129.042,"pop, Dance/Electronic" -MARINA,How to Be a Heartbreaker,221493,False,2012,50,0.69,0.897,11,-4.696,0,0.0506,0.0142,0.0,0.108,0.849,140.05,"pop, Dance/Electronic" -Justin Bieber,Boyfriend,171333,False,2012,63,0.717,0.55,10,-6.019,0,0.0519,0.0358,0.00198,0.126,0.331,96.979,pop -David Guetta,Titanium (feat. Sia),245040,False,2012,79,0.604,0.787,0,-3.674,0,0.103,0.0679,0.15,0.127,0.301,126.062,"hip hop, pop, Dance/Electronic" -Jessie J,Domino,231840,False,2011,74,0.758,0.557,7,-4.568,1,0.034,0.0117,0.0,0.0418,0.781,126.986,"hip hop, pop" -Taylor Swift,I Knew You Were Trouble.,219720,False,2012,59,0.622,0.469,3,-6.798,0,0.0363,0.00454,2.25e-06,0.0335,0.679,77.019,pop -ScHoolboy Q,Hands on the Wheel (feat. Asap Rocky),197131,True,2012,61,0.646,0.784,1,-7.471,0,0.108,0.0166,0.0,0.0721,0.179,127.839,"hip hop, pop" -P!nk,Try,247906,False,2012,76,0.674,0.628,2,-7.079,1,0.03,0.00144,0.0,0.0944,0.552,103.998,pop -Robbie Williams,Candy,201053,False,2012,68,0.715,0.791,10,-6.63,1,0.0414,0.0368,0.0,0.0694,0.879,116.043,"pop, rock" -Snoop Dogg,"Young, Wild & Free (feat. Bruno Mars)",207333,True,2011,77,0.715,0.655,0,-6.425,1,0.137,0.0525,0.0,0.115,0.531,95.078,"hip hop, pop" -Justin Bieber,As Long As You Love Me,229466,False,2012,67,0.571,0.873,0,-3.382,0,0.1,0.0811,0.0,0.361,0.613,139.691,pop -Rita Ora,R.I.P. (feat. 
Tinie Tempah),228026,False,2012,46,0.603,0.831,11,-3.443,0,0.0479,0.027,0.0,0.652,0.358,72.022,"hip hop, pop, Dance/Electronic" -Alex Clare,Too Close,256613,False,2011,60,0.588,0.694,11,-4.278,0,0.0387,0.00948,0.0,0.113,0.271,126.027,rock -Tyga,Faded,206666,True,2012,58,0.831,0.667,1,-8.438,1,0.188,0.0103,0.0,0.35,0.339,95.073,"hip hop, pop" -Alesso,Years - ラジオ・エディット,195480,False,2012,0,0.37,0.823,2,-6.245,1,0.0805,0.00234,1.91e-06,0.174,0.323,128.008,"pop, Dance/Electronic" -Guy Sebastian,Battle Scars (feat. Lupe Fiasco),250080,False,2012,0,0.61,0.863,5,-2.632,0,0.206,0.186,0.0,0.097,0.508,83.993,pop -Katy Perry,The One That Got Away,227333,False,2012,72,0.687,0.792,1,-4.023,0,0.0353,0.000802,0.0,0.2,0.864,133.962,pop -Avicii,Silhouettes - Original Radio Edit,211880,False,2012,53,0.605,0.8,5,-6.235,0,0.0545,0.155,0.0562,0.121,0.836,128.074,"pop, Dance/Electronic" -Gotye,Somebody That I Used To Know,244884,False,2011,57,0.863,0.527,0,-6.896,1,0.0358,0.607,0.000168,0.107,0.674,129.054,set() -Flo Rida,Wild Ones (feat. Sia),232946,False,2012,77,0.608,0.86,5,-5.324,0,0.0554,0.0991,0.0,0.262,0.437,127.075,"hip hop, pop" -One Direction,What Makes You Beautiful,199986,False,2012,82,0.726,0.787,4,-2.494,1,0.0737,0.009,0.0,0.0596,0.888,124.99,pop -Lana Del Rey,Born To Die,286253,False,2012,53,0.18,0.636,4,-6.591,0,0.0439,0.209,0.000133,0.217,0.39,75.223,pop -Kesha,C'Mon,214333,False,2012,0,0.558,0.781,1,-5.448,0,0.0963,0.00657,0.00144,0.102,0.286,126.025,"pop, Dance/Electronic" -Owl City,Good Time,205933,False,2012,77,0.56,0.872,3,-4.269,1,0.14,0.0239,6.98e-06,0.371,0.682,126.05,"rock, pop" -Calvin Harris,We'll Be Coming Back (feat. Example),234360,False,2012,62,0.596,0.952,7,-4.364,1,0.0873,0.00131,0.0,0.598,0.571,127.945,"hip hop, pop, Dance/Electronic" -Emeli Sandé,"Read All About It, Pt. 
III",283706,False,2012,65,0.63,0.44,2,-7.186,1,0.0249,0.822,3.87e-06,0.0926,0.343,98.082,"pop, R&B" -Ellie Goulding,Lights - Single Version,210853,False,2010,63,0.682,0.795,8,-6.17,0,0.0367,0.0297,0.0391,0.131,0.78,120.008,"pop, rock, Dance/Electronic" -Train,Drive By,195973,False,2012,77,0.765,0.837,1,-3.113,0,0.032,0.00107,1.06e-05,0.0801,0.721,122.028,pop -Disclosure,Latch,255631,False,2013,74,0.503,0.727,1,-5.456,1,0.167,0.0159,9.45e-05,0.0895,0.521,121.985,"pop, Dance/Electronic" -Coldplay,Princess of China,239215,False,2011,66,0.42,0.69,9,-6.221,0,0.0347,0.00385,0.015,0.287,0.237,85.014,"rock, pop" -One Direction,Live While We're Young,200186,False,2012,0,0.658,0.837,2,-2.063,1,0.0543,0.0629,0.0,0.0969,0.936,126.015,pop -Gym Class Heroes,Ass Back Home (feat. Neon Hitch),222213,True,2011,61,0.716,0.838,10,-4.289,1,0.0513,0.134,0.0,0.148,0.646,130.034,"hip hop, pop" -Carrie Underwood,Blown Away,240133,False,2012,63,0.531,0.843,9,-2.569,0,0.0429,0.0909,0.0,0.0283,0.392,136.991,"pop, country" -alt-J,Breezeblocks,227080,False,2012,71,0.616,0.656,5,-7.298,1,0.0344,0.096,0.000879,0.205,0.286,150.071,rock -Chris Brown,Don't Wake Me Up,222306,False,2012,62,0.602,0.691,7,-5.197,0,0.051,0.0548,0.0,0.144,0.206,127.967,"hip hop, pop, R&B" -Kelly Clarkson,Stronger (What Doesn't Kill You),221946,False,2011,74,0.562,0.939,0,-4.282,1,0.0475,0.046,0.0,0.112,0.684,116.044,"pop, R&B" -Drake,The Motto,181573,True,2011,74,0.766,0.442,1,-8.558,1,0.356,0.000107,6.12e-05,0.111,0.39,201.8,"hip hop, pop, R&B" -Katy Perry,Wide Awake,220946,False,2012,65,0.514,0.683,5,-5.099,1,0.0367,0.0749,2.64e-06,0.392,0.575,159.814,pop -Rita Ora,How We Do (Party),247026,True,2012,48,0.738,0.922,7,-3.94,1,0.0833,0.0034,0.0,0.0986,0.689,116.024,"hip hop, pop, Dance/Electronic" -Rudimental,Feel the Love (feat. 
John Newman),245186,False,2013,64,0.389,0.706,1,-6.849,1,0.0593,0.0026,0.000182,0.686,0.238,179.911,"pop, Dance/Electronic" -Lana Del Rey,Ride,289080,True,2012,66,0.373,0.686,0,-5.52,1,0.034,0.128,1.96e-06,0.383,0.189,93.763,pop -Ne-Yo,Let Me Love You (Until You Learn To Love Yourself),251626,False,2012,69,0.658,0.677,5,-6.628,1,0.0393,0.248,0.0,0.368,0.248,124.91,"pop, R&B" -Demi Lovato,Give Your Heart a Break,205346,False,2011,71,0.651,0.695,6,-3.218,1,0.0487,0.23,0.0,0.144,0.569,123.008,pop -Drake,Take Care,277386,True,2011,75,0.629,0.515,0,-10.358,0,0.265,0.0267,1.22e-05,0.0888,0.299,121.845,"hip hop, pop, R&B" -Little Mix,Wings,219733,False,2012,0,0.738,0.875,7,-3.141,1,0.127,0.000673,0.000556,0.285,0.538,114.962,pop -Bruno Mars,It Will Rain,257720,False,2011,74,0.576,0.835,2,-6.826,1,0.0486,0.337,0.0,0.082,0.476,150.017,pop -Cher Lloyd,Want U Back,214013,False,2011,67,0.696,0.893,9,-2.963,1,0.076,0.0662,0.0,0.473,0.573,97.954,"pop, Dance/Electronic" -Lorde,Royals,190185,False,2013,77,0.674,0.428,7,-9.504,1,0.122,0.121,0.0,0.132,0.337,84.878,"pop, Dance/Electronic" -Robin Thicke,Blurred Lines,263053,False,2013,55,0.861,0.504,7,-7.707,1,0.0489,0.00412,1.78e-05,0.0783,0.881,120.0,"pop, R&B" -Imagine Dragons,Radioactive,186813,False,2012,77,0.448,0.784,9,-3.686,1,0.0627,0.106,0.000108,0.668,0.236,136.245,rock -Avicii,Wake Me Up,247426,False,2013,6,0.532,0.783,2,-5.697,1,0.0523,0.0038,0.0012,0.161,0.643,124.08,"pop, Dance/Electronic" -Bruno Mars,Locked out of Heaven,233478,False,2012,85,0.726,0.698,5,-4.165,1,0.0431,0.049,0.0,0.309,0.867,143.994,pop -Macklemore & Ryan Lewis,Thrift Shop (feat. Wanz),235613,True,2012,1,0.781,0.526,6,-6.985,0,0.293,0.0619,0.0,0.0457,0.662,94.992,"hip hop, pop" -Daft Punk,Get Lucky (feat. 
Pharrell Williams & Nile Rodgers) - Radio Edit,248413,False,2013,83,0.794,0.811,6,-8.966,0,0.038,0.0426,1.07e-06,0.101,0.862,116.047,"hip hop, Dance/Electronic" -will.i.am,Scream & Shout,283400,True,2013,78,0.772,0.685,5,-6.849,1,0.0696,0.019,8.96e-05,0.131,0.501,130.033,"hip hop, pop" -Stromae,Papaoutai,232146,False,2013,0,0.733,0.818,10,-7.222,0,0.0859,0.0241,0.0,0.0636,0.253,116.019,pop -Bingo Players,Get Up (Rattle) - Vocal Edit,166933,False,2013,1,0.801,0.985,7,-2.69,1,0.0645,0.0205,6.86e-06,0.296,0.722,127.99,"pop, Dance/Electronic" -Naughty Boy,La La La,220779,False,2013,58,0.754,0.677,6,-4.399,0,0.0316,0.112,0.0,0.111,0.254,124.988,Dance/Electronic -Tom Odell,Another Love,244360,True,2013,88,0.445,0.537,4,-8.532,0,0.04,0.695,1.65e-05,0.0944,0.131,122.769,pop -2 Chainz,We Own It (Fast & Furious),227906,False,2013,69,0.563,0.902,1,-4.586,0,0.402,0.0545,0.0,0.0524,0.559,171.999,"hip hop, pop" -David Guetta,Play Hard (feat. Ne-Yo & Akon),201000,False,2012,69,0.691,0.921,8,-1.702,0,0.0533,0.173,0.0,0.331,0.8,130.072,"hip hop, pop, Dance/Electronic" -Pitbull,Feel This Moment (feat. Christina Aguilera),229506,False,2012,77,0.673,0.758,7,-3.632,1,0.158,0.039,0.0,0.341,0.542,135.956,"hip hop, pop, latin" -will.i.am,#thatPOWER,279506,False,2013,68,0.797,0.608,6,-6.096,0,0.0584,0.00112,7.66e-05,0.0748,0.402,127.999,"hip hop, pop" -Lil Wayne,Love Me,255053,True,2013,68,0.669,0.634,11,-6.476,1,0.0327,0.0125,0.0,0.0946,0.496,124.906,"hip hop, pop" -Macklemore & Ryan Lewis,Can't Hold Us (feat. Ray Dalton),258342,False,2012,81,0.641,0.922,2,-4.457,1,0.0786,0.0291,0.0,0.0862,0.847,146.078,"hip hop, pop" -Martin Garrix,Animals - Radio Edit,176117,False,2013,0,0.593,0.914,1,-5.351,1,0.0363,0.00137,0.445,0.0714,0.0381,128.015,"pop, Dance/Electronic" -Jason Derulo,Talk Dirty (feat. 
2 Chainz),177685,True,2013,56,0.76,0.652,6,-7.321,1,0.232,0.0348,0.0,0.307,0.759,100.315,"hip hop, pop" -John Newman,Love Me Again,239894,False,2013,74,0.495,0.894,2,-4.814,0,0.0441,0.00453,0.000596,0.103,0.213,126.03,set() -Britney Spears,Work B**ch,247853,True,2013,0,0.63,0.816,9,-6.535,1,0.15,0.124,0.00111,0.0616,0.85,128.012,pop -The Neighbourhood,Sweater Weather,240400,False,2013,89,0.612,0.807,10,-2.81,1,0.0336,0.0495,0.0177,0.101,0.398,124.053,"rock, pop" -Sebastian Ingrosso,Reload - Radio Edit,221272,False,2013,68,0.485,0.724,9,-4.633,0,0.0521,0.0736,0.0,0.0631,0.433,128.045,"pop, Dance/Electronic" -Capital Cities,Safe And Sound,192693,False,2013,0,0.652,0.783,0,-4.829,1,0.0309,0.000189,0.0173,0.0889,0.836,117.952,pop -Demi Lovato,Heart Attack,210840,False,2013,79,0.504,0.785,8,-4.802,1,0.104,0.0738,0.0,0.239,0.502,173.968,pop -Arctic Monkeys,Why'd You Only Call Me When You're High?,161123,False,2013,84,0.691,0.631,2,-6.478,1,0.0368,0.0483,1.13e-05,0.104,0.8,92.004,rock -Justin Bieber,Beauty And A Beat,227986,False,2012,73,0.602,0.843,0,-4.831,1,0.0593,0.000688,5.27e-05,0.0682,0.526,128.003,pop -Eminem,The Monster,250188,True,2013,75,0.781,0.853,1,-3.68,0,0.0715,0.0525,0.0,0.12,0.624,110.049,hip hop -Pitbull,Don't Stop the Party (feat. TJR),206120,False,2012,64,0.722,0.958,4,-3.617,1,0.0912,0.00726,0.0,0.375,0.952,127.008,"hip hop, pop, latin" -Selena Gomez,Come & Get It,231986,False,2013,69,0.546,0.787,7,-4.1,0,0.0517,0.0101,0.00021,0.0809,0.573,79.979,pop -P!nk,True Love (feat. 
Lily Allen),230733,True,2012,71,0.457,0.823,9,-4.76,1,0.311,0.00144,0.0,0.108,0.578,192.205,pop -Disclosure,White Noise,277687,False,2013,54,0.665,0.844,10,-6.164,0,0.0501,0.00963,0.000215,0.311,0.902,119.978,"pop, Dance/Electronic" -Flo Rida,I Cry,223800,False,2012,60,0.693,0.822,4,-5.441,0,0.0439,0.00616,1.79e-06,0.315,0.763,126.035,"hip hop, pop" -Kanye West,Black Skinhead,188013,True,2013,0,0.578,0.825,1,-6.107,1,0.322,0.000986,0.0,0.176,0.283,130.089,hip hop -Lana Del Rey,Summertime Sadness (Lana Del Rey Vs. Cedric Gervais) - Cedric Gervais Remix,214912,False,2013,72,0.572,0.81,1,-5.791,0,0.0558,0.0157,6.53e-06,0.13,0.11,126.052,pop -Eminem,Berzerk,238746,True,2013,66,0.739,0.872,11,-4.059,0,0.333,0.0217,0.0,0.26,0.684,95.084,hip hop -Icona Pop,I Love It (feat. Charli XCX),157152,True,2013,68,0.711,0.906,8,-2.671,1,0.0284,0.00952,1.64e-05,0.153,0.824,125.916,"pop, Dance/Electronic" -Fall Out Boy,My Songs Know What You Did In The Dark (Light Em Up),186826,False,2013,71,0.558,0.924,9,-4.341,0,0.064,0.0271,3.32e-06,0.537,0.567,151.99,rock -Lorde,Team,193058,False,2013,0,0.691,0.582,6,-7.444,1,0.0939,0.162,0.0,0.247,0.427,100.05,"pop, Dance/Electronic" -Avicii,I Could Be The One (Avicii Vs. Nicky Romero) - Radio Edit,208316,False,2012,68,0.509,0.79,6,-3.782,0,0.0374,0.332,6.67e-05,0.316,0.638,127.946,"pop, Dance/Electronic" -Imagine Dragons,Demons,177506,False,2012,81,0.505,0.71,3,-3.015,1,0.0321,0.19,0.00025,0.269,0.428,89.938,rock -One Direction,One Way or Another (Teenage Kicks),157293,False,2013,66,0.489,0.867,4,-3.121,0,0.071,0.025,0.0,0.586,0.409,162.131,pop -Taylor Swift,I Knew You Were Trouble.,219720,False,2012,76,0.622,0.469,3,-6.798,0,0.0363,0.00454,2.25e-06,0.0335,0.679,77.019,pop -Arctic Monkeys,Do I Wanna Know?,272394,False,2013,84,0.548,0.532,5,-7.596,1,0.0323,0.186,0.000263,0.217,0.405,85.03,rock -Rudimental,Waiting All Night (feat. 
Ella Eyre),292586,False,2013,66,0.544,0.728,6,-5.358,0,0.05,0.00264,0.00959,0.427,0.281,174.983,"pop, Dance/Electronic" -Klangkarussell,Sonnentanz - Sun Don't Shine,238120,False,2014,68,0.579,0.549,5,-8.262,0,0.0909,0.104,0.0202,0.0964,0.155,119.74,"pop, Dance/Electronic" -Calvin Harris,Drinking from the Bottle (feat. Tinie Tempah),240346,False,2012,61,0.665,0.886,9,-4.175,0,0.0514,0.0469,6.24e-05,0.0525,0.53,128.062,"hip hop, pop, Dance/Electronic" -A$AP Rocky,"F**kin' Problems (feat. Drake, 2 Chainz & Kendrick Lamar)",233786,True,2013,76,0.853,0.693,1,-6.87,1,0.275,0.0239,0.0,0.11,0.662,95.967,hip hop -P!nk,Try,247906,False,2012,76,0.674,0.628,2,-7.079,1,0.03,0.00144,0.0,0.0944,0.552,103.998,pop -Ylvis,The Fox (What Does the Fox Say?),213708,False,2013,63,0.703,0.867,6,-4.292,1,0.0453,0.107,0.0,0.119,0.546,128.008,hip hop -Ellie Goulding,Burn,231211,False,2014,70,0.559,0.777,1,-5.031,1,0.0432,0.31,0.0,0.105,0.329,87.016,"pop, rock, Dance/Electronic" -Baauer,Harlem Shake,196664,False,2013,57,0.452,0.794,0,-5.151,1,0.0483,0.0111,0.00182,0.416,0.282,137.825,Dance/Electronic -Calvin Harris,I Need Your Love (feat. Ellie Goulding),234506,False,2012,70,0.695,0.869,8,-5.066,1,0.0483,0.41,0.0,0.237,0.58,124.989,"hip hop, pop, Dance/Electronic" -Katy Perry,This Is How We Do,204285,False,2013,60,0.69,0.636,9,-6.028,0,0.0457,0.0203,0.0,0.147,0.8,96.0,pop -AWOLNATION,Sail,259102,False,2011,2,0.825,0.435,1,-9.582,1,0.0568,0.452,0.609,0.0953,0.243,119.038,"pop, rock" -Duke Dumont,Need U (100%) [feat. A*M*E] - Radio Edit,174853,False,2013,27,0.681,0.835,0,-5.705,0,0.049,0.00176,0.00183,0.354,0.469,124.047,"pop, Dance/Electronic" -Jason Derulo,The Other Side,226986,False,2013,50,0.561,0.836,9,-3.939,1,0.1,0.0525,0.0,0.136,0.517,127.923,"hip hop, pop" -Storm Queen,Look Right Through - MK Vocal Edit,150400,False,2014,0,0.832,0.815,0,-8.035,0,0.081,0.00304,0.00553,0.263,0.519,119.995,Dance/Electronic -Calvin Harris,Sweet Nothing (feat. 
Florence Welch),212560,False,2012,71,0.573,0.929,8,-3.942,0,0.109,0.197,0.000112,0.0567,0.582,127.934,"hip hop, pop, Dance/Electronic" -Katy Perry,Unconditionally,228878,False,2013,0,0.555,0.729,7,-4.813,1,0.0387,0.00357,0.0,0.209,0.369,129.003,pop -Wale,Bad (feat. Rihanna) - Remix,238826,True,2013,61,0.807,0.702,6,-6.594,0,0.0748,0.307,0.0,0.111,0.447,114.038,"hip hop, pop, R&B" -Ciara,Body Party,234040,False,2013,64,0.577,0.576,2,-6.299,1,0.0563,0.0143,0.0,0.602,0.12,133.973,"pop, R&B" -Drake,Started From the Bottom,174120,True,2013,65,0.794,0.523,8,-7.829,1,0.161,0.0331,0.0,0.156,0.56,86.307,"hip hop, pop, R&B" -Birdy,Wings,252106,False,2013,48,0.474,0.707,7,-4.534,1,0.0253,0.0831,0.0,0.162,0.224,83.235,pop -Bastille,Of The Night,214205,False,2013,61,0.67,0.829,5,-7.2,0,0.0427,0.0192,0.000225,0.089,0.349,125.01,"pop, rock" -Olly Murs,Dear Darlin',206373,False,2012,59,0.512,0.828,11,-4.672,0,0.0454,0.00627,8.73e-06,0.119,0.34,124.021,"pop, Dance/Electronic" -Katy Perry,Roar,223546,False,2013,73,0.554,0.772,7,-4.821,0,0.0418,0.00487,6.6e-06,0.354,0.455,179.984,pop -Blake Shelton,Sure Be Cool If You Did,215720,False,2013,56,0.576,0.726,11,-4.625,1,0.035,0.255,0.0,0.108,0.582,136.802,country -Calvin Harris,Thinking About You (feat. 
Ayah Marar),247933,False,2012,65,0.725,0.874,0,-3.715,0,0.0396,0.00262,0.000412,0.0958,0.748,127.985,"hip hop, pop, Dance/Electronic" -JAY-Z,Holy Grail,338413,True,2013,62,0.676,0.534,2,-6.901,0,0.0831,0.0594,8.59e-06,0.256,0.156,145.082,hip hop -Bridgit Mendler,Ready or Not,200946,False,2012,72,0.715,0.872,2,-3.835,1,0.0509,0.00351,3.84e-05,0.107,0.748,93.043,pop -The Saturdays,What About Us,220682,False,2013,61,0.704,0.68,7,-4.751,1,0.0581,0.0059,0.0,0.213,0.689,123.973,"pop, Dance/Electronic" -Bastille,Pompeii,214147,False,2013,74,0.679,0.715,9,-6.383,1,0.0407,0.0755,0.0,0.271,0.571,127.435,"pop, rock" -One Direction,Best Song Ever,200106,False,2013,76,0.652,0.877,1,-2.986,1,0.0465,0.0227,0.0,0.0789,0.486,118.491,pop -Nelly,Hey Porsche,209466,True,2013,57,0.726,0.795,9,-4.653,1,0.029,0.136,0.0,0.698,0.952,115.995,"hip hop, pop, R&B" -Zedd,Clarity,271426,False,2012,0,0.523,0.78,8,-3.464,1,0.0753,0.0366,0.0,0.0751,0.193,128.006,"hip hop, pop, Dance/Electronic" -Miley Cyrus,Wrecking Ball,221360,False,2013,77,0.53,0.422,5,-6.262,1,0.0342,0.407,0.0,0.107,0.349,119.964,pop -Chris Brown,Fine China,213666,False,2013,0,0.659,0.735,10,-4.758,0,0.056,0.0466,0.0,0.111,0.609,104.038,"hip hop, pop, R&B" -Daft Punk,Lose Yourself to Dance (feat. 
Pharrell Williams),353893,False,2013,69,0.832,0.659,10,-7.828,0,0.057,0.0839,0.00114,0.0753,0.674,100.163,"hip hop, Dance/Electronic" -Armin van Buuren,This Is What It Feels Like,204360,False,2013,66,0.551,0.833,8,-5.217,1,0.03,0.0391,2.77e-06,0.0632,0.145,129.885,"pop, Dance/Electronic" -Justin Timberlake,Mirrors,484146,False,2013,78,0.574,0.512,5,-6.664,0,0.0503,0.234,0.0,0.0946,0.512,76.899,pop -Bruno Mars,Treasure,178560,True,2012,78,0.874,0.692,5,-5.28,0,0.0431,0.0412,7.24e-05,0.324,0.937,116.017,pop -Of Monsters and Men,Little Talks,266600,False,2012,79,0.457,0.757,1,-5.177,1,0.032,0.0206,0.0,0.146,0.417,102.961,"pop, rock, Folk/Acoustic" -Miley Cyrus,We Can't Stop,231240,False,2013,78,0.613,0.622,1,-5.794,0,0.0334,0.00882,0.0,0.37,0.484,80.003,pop -J. Cole,Power Trip (feat. Miguel),241160,True,2013,0,0.667,0.61,1,-7.054,1,0.217,0.322,0.000203,0.426,0.465,99.991,hip hop -Paramore,Still into You,216013,False,2013,79,0.602,0.923,5,-3.763,1,0.044,0.0098,0.0,0.0561,0.765,136.01,"pop, rock" -Christina Perri,human,250706,False,2013,59,0.439,0.489,8,-6.286,1,0.0368,0.132,0.000643,0.114,0.253,143.808,pop -Drake,"Hold On, We're Going Home",227880,False,2013,72,0.773,0.414,6,-7.436,0,0.0961,0.00411,3.4e-05,0.0733,0.289,99.993,"hip hop, pop, R&B" -James Blunt,Bonfire Heart,238000,False,2013,0,0.575,0.821,0,-5.7,1,0.0527,0.181,0.0,0.124,0.449,118.021,pop -Maroon 5,Daylight,225066,False,2012,66,0.656,0.674,7,-5.473,1,0.0269,0.00242,0.0,0.255,0.369,120.001,pop -Childish Gambino,3005,235662,True,2013,0,0.681,0.463,6,-6.542,0,0.289,0.128,0.0,0.0769,0.661,82.92,hip hop -Avril Lavigne,Here's to Never Growing Up,214080,True,2013,0,0.483,0.871,0,-3.084,1,0.101,0.015,0.0,0.415,0.716,164.986,pop -Taylor Swift,22,232120,False,2012,68,0.661,0.729,7,-6.561,1,0.0376,0.00215,0.0013,0.0477,0.668,103.987,pop -Passenger,Let Her Go,252866,False,2012,73,0.509,0.538,7,-7.335,1,0.0572,0.385,0.0,0.104,0.244,75.089,pop -Ariana Grande,The 
Way,227026,False,2013,65,0.645,0.878,5,-3.208,0,0.113,0.294,0.0,0.076,0.862,82.324,pop -Rihanna,Stay,240706,False,2012,76,0.621,0.31,9,-10.164,0,0.0283,0.945,6.12e-05,0.117,0.125,111.893,"hip hop, pop, R&B" -Justin Timberlake,Suit & Tie (feat. Jay-Z),326280,True,2013,67,0.795,0.596,11,-3.799,0,0.219,0.103,0.0,0.164,0.357,101.985,pop -Bruno Mars,When I Was Your Man,213826,False,2012,83,0.612,0.28,0,-8.648,1,0.0434,0.932,0.0,0.088,0.387,72.795,pop -Florida Georgia Line,Cruise,208960,False,2012,57,0.457,0.948,10,-3.364,1,0.0354,0.0191,0.0,0.0536,0.878,148.0,country -Pharrell Williams,"Happy - From ""Despicable Me 2""",232720,False,2014,79,0.647,0.822,5,-4.662,0,0.183,0.219,0.0,0.0908,0.962,160.019,"hip hop, pop" -Katy Perry,Dark Horse,215672,False,2013,74,0.647,0.585,6,-6.123,1,0.0512,0.00314,0.0,0.165,0.353,131.934,pop -ScHoolboy Q,Collard Greens,299960,True,2014,0,0.826,0.571,11,-4.871,1,0.064,0.0239,7.38e-06,0.207,0.666,153.972,"hip hop, pop" -Iggy Azalea,Fancy,199938,True,2014,69,0.912,0.716,10,-4.141,0,0.0697,0.0904,0.0,0.0491,0.377,94.981,"hip hop, pop" -Maroon 5,Animals,231013,False,2014,79,0.279,0.742,4,-6.46,0,0.0898,0.000185,0.0,0.593,0.328,189.868,pop -Sam Smith,Stay With Me,172723,False,2014,80,0.418,0.42,0,-6.444,1,0.0414,0.588,6.39e-05,0.11,0.184,84.094,pop -MAGIC!,Rude,224840,False,2014,80,0.773,0.758,1,-4.993,1,0.0381,0.0422,0.0,0.305,0.925,144.033,set() -Sia,Chandelier,216120,False,2014,78,0.399,0.787,1,-2.88,1,0.0499,0.0197,6.07e-05,0.0685,0.572,117.089,pop -John Legend,All of Me,269560,False,2013,84,0.422,0.264,8,-7.064,1,0.0322,0.922,0.0,0.132,0.331,119.93,"pop, R&B" -ZHU,Faded,223386,False,2014,49,0.861,0.475,9,-7.195,0,0.0487,0.00865,0.119,0.122,0.599,124.96,Dance/Electronic -Meghan Trainor,All About That Bass,187920,True,2015,72,0.807,0.887,9,-3.726,1,0.0503,0.0573,2.87e-06,0.124,0.961,134.052,pop -David Guetta,Lovers on the Sun (feat. 
Sam Martin),203520,False,2014,50,0.645,0.891,6,-2.505,0,0.0387,0.0932,3.88e-06,0.379,0.568,124.915,"hip hop, pop, Dance/Electronic" -Iggy Azalea,Black Widow,209423,True,2014,67,0.743,0.72,11,-3.753,1,0.124,0.192,0.000386,0.109,0.519,163.99,"hip hop, pop" -Tove Lo,Habits (Stay High),209160,False,2014,74,0.733,0.65,5,-3.539,1,0.0315,0.0703,6.59e-05,0.0829,0.348,110.003,"pop, Dance/Electronic" -Oliver Heldens,Gecko (Overdrive) - Radio Edit,165440,False,2014,67,0.609,0.885,0,-5.469,1,0.0642,0.00521,1.15e-05,0.336,0.76,124.959,"pop, Dance/Electronic" -DJ Snake,You Know You Like It,247266,False,2014,69,0.407,0.725,5,-5.346,0,0.188,0.0141,2.46e-06,0.306,0.247,196.093,"hip hop, pop, Dance/Electronic" -Jason Derulo,Talk Dirty (feat. 2 Chainz),177685,True,2013,56,0.76,0.652,6,-7.321,1,0.232,0.0348,0.0,0.307,0.759,100.315,"hip hop, pop" -Maroon 5,Maps,189840,False,2014,0,0.737,0.723,1,-5.51,0,0.0295,0.0179,0.0,0.0675,0.893,120.003,pop -David Guetta,Bad (feat. Vassy) - Radio Edit,170625,False,2014,45,0.614,0.972,5,-3.927,0,0.088,0.00125,0.0186,0.328,0.411,127.966,"hip hop, pop, Dance/Electronic" -Tinashe,2 On (feat. ScHoolboy Q),227000,True,2014,71,0.742,0.595,9,-7.51,1,0.107,0.15,0.0,0.111,0.431,101.013,"hip hop, pop, R&B, Dance/Electronic" -Route 94,My Love (feat. Jess Glynne),259934,False,2014,72,0.814,0.622,8,-7.573,1,0.0492,0.000132,0.72,0.0658,0.726,119.976,"pop, Dance/Electronic" -Taylor Swift,Bad Blood,211933,False,2014,54,0.646,0.794,7,-6.104,1,0.19,0.0885,6.16e-06,0.201,0.287,170.216,pop -Trey Songz,Na Na,231906,False,2015,62,0.67,0.476,9,-6.253,0,0.0406,0.328,0.0,0.104,0.235,96.975,"hip hop, pop, R&B" -Calvin Harris,Under Control (feat. Hurts),184280,False,2014,72,0.544,0.915,8,-3.405,1,0.0841,0.129,0.000914,0.115,0.51,126.094,"hip hop, pop, Dance/Electronic" -O.T. Genasis,CoCo,239573,True,2014,61,0.886,0.628,8,-5.949,1,0.131,0.00215,1.15e-05,0.31,0.178,119.906,"hip hop, pop" -Jason Derulo,Wiggle (feat. 
Snoop Dogg),193295,False,2013,66,0.697,0.621,9,-6.886,0,0.25,0.0802,0.0,0.162,0.721,81.946,"hip hop, pop" -Lilly Wood and The Prick,Prayer in C - Robin Schulz Radio Edit,189399,False,2014,66,0.76,0.886,9,-5.356,0,0.0258,0.0219,7.43e-06,0.623,0.78,123.002,"pop, rock" -Jeremih,Don't Tell 'Em,266840,True,2015,74,0.856,0.527,2,-5.225,1,0.0997,0.392,0.0,0.11,0.386,98.052,"hip hop, pop, R&B" -Nicki Minaj,Anaconda,260240,True,2014,66,0.964,0.605,9,-6.223,1,0.179,0.0668,7.78e-06,0.214,0.646,129.994,"hip hop, pop" -Calvin Harris,Blame (feat. John Newman),212960,False,2014,75,0.414,0.857,0,-4.078,0,0.0808,0.0287,0.00574,0.343,0.348,128.024,"hip hop, pop, Dance/Electronic" -DJ Snake,Turn Down for What,213733,False,2013,70,0.818,0.799,1,-4.1,0,0.156,0.00107,0.128,0.057,0.0815,100.014,"hip hop, pop, Dance/Electronic" -Pitbull,Timber (feat. Ke$ha),204160,False,2012,80,0.581,0.963,11,-4.087,1,0.0981,0.0295,0.0,0.139,0.788,129.992,"hip hop, pop, latin" -OneRepublic,Counting Stars,257386,False,2014,62,0.663,0.705,1,-4.972,0,0.0385,0.0654,0.0,0.117,0.477,122.014,pop -Bobby Shmurda,Hot N*gga,194561,True,2014,73,0.794,0.51,9,-7.314,1,0.42,0.0505,1.81e-06,0.0562,0.18,167.879,"hip hop, pop" -Calvin Harris,Summer,224506,False,2014,21,0.603,0.861,4,-3.565,0,0.0325,0.0215,0.043,0.1,0.72,127.962,"hip hop, pop, Dance/Electronic" -ScHoolboy Q,Man Of The Year,216013,True,2014,0,0.743,0.861,3,-5.234,1,0.0428,0.00518,0.0,0.181,0.391,111.959,"hip hop, pop" -Nico & Vinz,Am I Wrong,245866,False,2014,76,0.729,0.675,8,-6.003,1,0.0312,0.175,1.58e-06,0.55,0.779,119.968,"hip hop, pop" -Becky G,Shower,206166,False,2014,78,0.699,0.529,2,-7.548,1,0.0487,0.0317,3.59e-05,0.285,0.121,119.987,"pop, latin" -Sigma,Nobody To Love - Radio Edit,189720,False,2014,62,0.441,0.921,8,-3.32,1,0.0436,0.000526,6.34e-06,0.177,0.182,175.001,"pop, Dance/Electronic" -The Chainsmokers,#SELFIE - Original Mix,183750,False,2014,0,0.789,0.915,0,-3.263,1,0.248,0.0135,8.77e-06,0.0818,0.66,127.955,"pop, Dance/Electronic" 
-Lorde,Team,193058,False,2013,76,0.69,0.578,6,-7.436,1,0.0929,0.167,0.0,0.305,0.416,99.961,"pop, Dance/Electronic" -Rixton,Me And My Broken Heart,193733,False,2014,77,0.545,0.783,5,-4.261,0,0.0345,0.00489,0.0,0.132,0.551,174.084,pop -Klingande,Jubel - Radio Edit,201626,False,2013,50,0.686,0.524,0,-7.251,1,0.0343,0.649,0.0569,0.14,0.0951,124.938,"pop, Dance/Electronic" -Mr. Probz,Waves - Robin Schulz Radio Edit,208133,False,2014,2,0.829,0.51,5,-9.334,0,0.0369,0.00821,0.0014,0.0829,0.45,119.993,"hip hop, Dance/Electronic" -Paloma Faith,Only Love Can Hurt Like This,232893,False,2014,80,0.566,0.885,8,-4.528,1,0.0818,0.0958,9.97e-05,0.334,0.304,90.99,"pop, R&B, Dance/Electronic" -Kiesza,Hideaway,251986,False,2014,64,0.838,0.72,7,-4.135,0,0.0483,0.0862,0.00696,0.0772,0.203,122.993,"pop, Dance/Electronic" -Ellie Goulding,Burn,231211,False,2014,70,0.559,0.777,1,-5.031,1,0.0432,0.31,0.0,0.105,0.329,87.016,"pop, rock, Dance/Electronic" -Ariana Grande,Break Free,214840,False,2014,0,0.687,0.702,7,-5.324,0,0.0455,0.0064,4.35e-05,0.204,0.284,129.956,pop -Kid Ink,Show Me (feat. Chris Brown),217800,True,2014,68,0.711,0.508,7,-7.299,1,0.0502,0.0355,0.0,0.141,0.663,98.098,"hip hop, pop" -Tove Lo,Talking Body,238426,True,2014,66,0.736,0.761,4,-5.449,0,0.0338,0.0966,0.0,0.0823,0.114,119.999,"pop, Dance/Electronic" -Chris Brown,Loyal (feat. Lil Wayne & Tyga),264946,True,2014,72,0.841,0.522,10,-5.963,0,0.049,0.0168,1.37e-06,0.188,0.616,99.059,"hip hop, pop, R&B" -OneRepublic,Love Runs Out,224226,False,2014,59,0.719,0.935,7,-3.752,1,0.0589,0.167,0.0,0.0973,0.738,120.022,pop -Olly Murs,Up (feat. Demi Lovato),224293,False,2014,0,0.69,0.845,9,-4.676,1,0.0339,0.0193,0.0,0.101,0.638,114.948,"pop, Dance/Electronic" -Peking Duk,High (feat. 
Nicole Millar),228000,False,2014,48,0.525,0.803,0,-4.052,0,0.0492,0.00129,0.000868,0.29,0.383,100.094,Dance/Electronic -Milky Chance,Stolen Dance,313684,False,2014,73,0.885,0.581,11,-8.813,1,0.0378,0.427,0.000204,0.0759,0.728,114.016,set() -Ed Sheeran,Don't,219840,False,2014,72,0.806,0.608,1,-7.008,1,0.0659,0.0113,0.0,0.635,0.849,95.049,pop -Ariana Grande,Love Me Harder,236133,False,2014,74,0.472,0.714,1,-4.389,0,0.0334,0.00937,0.0,0.0764,0.24,98.992,pop -Enrique Iglesias,Bailando - Spanish Version,243386,False,2014,66,0.718,0.792,7,-3.519,1,0.105,0.0467,3.65e-06,0.0399,0.96,90.949,"pop, latin" -Avicii,Hey Brother,255093,False,2013,2,0.545,0.78,7,-4.867,0,0.0436,0.0309,4.64e-05,0.0828,0.458,125.014,"pop, Dance/Electronic" -Jessie J,Bang Bang,199386,False,2014,77,0.706,0.786,0,-3.417,0,0.091,0.26,0.0,0.38,0.751,150.028,"hip hop, pop" -Duke Dumont,I Got U,285596,False,2014,67,0.636,0.761,9,-7.752,0,0.035,0.00377,0.00784,0.0851,0.463,120.837,"pop, Dance/Electronic" -Gorgon City,Ready For Your Love,198880,False,2014,61,0.677,0.753,5,-7.038,1,0.154,0.0531,0.0128,0.333,0.471,121.939,"pop, Dance/Electronic" -Taylor Swift,Style,231000,False,2014,61,0.588,0.791,7,-5.595,1,0.0402,0.00245,0.00258,0.118,0.487,94.933,pop -MKTO,Classic,175426,False,2012,82,0.72,0.791,1,-4.689,1,0.124,0.0384,0.0,0.157,0.756,102.071,"hip hop, pop" -Disclosure,Latch,255631,False,2013,74,0.503,0.727,1,-5.456,1,0.167,0.0159,9.45e-05,0.0895,0.521,121.985,"pop, Dance/Electronic" -Ariana Grande,Problem,193920,False,2014,73,0.66,0.805,1,-5.352,0,0.153,0.0192,8.83e-06,0.159,0.625,103.008,pop -Charli XCX,Boom Clap,169866,False,2014,49,0.659,0.911,4,-2.28,1,0.0786,0.154,0.000304,0.191,0.576,91.999,"pop, Dance/Electronic" -One Direction,Steal My Girl,228133,False,2014,79,0.536,0.768,10,-5.948,0,0.0347,0.00433,0.0,0.114,0.545,77.217,pop -Tiësto,Red Lights,263890,False,2013,0,0.653,0.829,10,-4.783,1,0.0377,0.000739,1.5e-06,0.225,0.545,124.989,"pop, Dance/Electronic" -Selena Gomez,The Heart Wants What It 
Wants,227360,False,2014,58,0.616,0.789,7,-4.874,0,0.0377,0.053,0.0,0.142,0.621,83.066,pop -Rita Ora,I Will Never Let You Down,203466,False,2014,65,0.753,0.801,4,-3.215,1,0.0296,0.403,0.0,0.128,0.794,128.011,"hip hop, pop, Dance/Electronic" -5 Seconds of Summer,She Looks So Perfect,202496,False,2014,74,0.494,0.951,9,-4.237,1,0.132,0.000569,0.0,0.327,0.441,160.025,pop -The Script,Superheroes,245466,False,2014,74,0.49,0.885,3,-4.121,1,0.0396,0.00218,0.0,0.0741,0.64,166.996,"pop, rock" -Taylor Swift,Shake It Off,219200,False,2014,79,0.647,0.8,7,-5.384,1,0.165,0.0647,0.0,0.334,0.942,160.078,pop -One Direction,Night Changes,226600,False,2014,83,0.672,0.52,8,-7.747,1,0.0353,0.859,0.0,0.115,0.37,120.001,pop -Ed Sheeran,Sing,235382,False,2014,69,0.818,0.67,8,-4.451,0,0.0472,0.304,1.22e-06,0.0601,0.939,119.988,pop -Alesso,Heroes (we could be),209866,False,2014,52,0.521,0.754,5,-4.144,1,0.0634,0.0373,0.0,0.239,0.352,125.876,"pop, Dance/Electronic" -Tiësto,Wasted,188371,False,2014,0,0.645,0.832,2,-5.595,1,0.0294,0.00106,0.00264,0.199,0.375,112.028,"pop, Dance/Electronic" -G.R.L.,Ugly Heart,198306,False,2014,64,0.65,0.786,9,-5.488,1,0.0463,0.019,0.0,0.323,0.446,124.96,pop -Coldplay,A Sky Full of Stars,267866,False,2014,80,0.545,0.675,6,-6.474,1,0.0279,0.00617,0.00197,0.209,0.162,124.97,"rock, pop" -Clean Bandit,Rather Be (feat. 
Jess Glynne),227833,False,2014,72,0.799,0.586,11,-6.735,1,0.0377,0.162,2.03e-06,0.193,0.549,120.97,"pop, Dance/Electronic" -Zedd,Stay The Night - Featuring Hayley Williams Of Paramore,217346,False,2014,63,0.596,0.738,8,-3.109,1,0.0411,0.109,0.0,0.0947,0.461,127.961,"hip hop, pop, Dance/Electronic" -Sia,Elastic Heart,257200,False,2014,70,0.421,0.791,9,-4.998,1,0.0496,0.0117,1.48e-05,0.146,0.499,130.075,pop -One Direction,Story of My Life,245493,False,2013,81,0.6,0.663,3,-5.802,1,0.0477,0.225,0.0,0.119,0.286,121.07,pop -Alex & Sierra,Little Do You Know,185200,False,2014,75,0.5,0.292,0,-8.554,1,0.0323,0.746,0.0,0.188,0.37,145.879,pop -Omarion,Post to Be (feat. Chris Brown & Jhene Aiko),226580,True,2014,55,0.733,0.676,10,-5.655,0,0.0432,0.0697,0.0,0.208,0.701,97.448,"hip hop, pop, R&B" -Echosmith,Cool Kids,237626,False,2013,71,0.719,0.671,8,-6.279,1,0.0336,0.0372,8.21e-06,0.12,0.786,130.027,pop -Natalie La Rose,Somebody,189906,False,2014,66,0.83,0.52,0,-8.714,1,0.0376,0.000792,1.25e-05,0.0656,0.735,104.99,set() -Vance Joy,Riptide,204280,False,2014,80,0.484,0.731,1,-6.694,1,0.0379,0.431,0.0,0.151,0.51,101.654,"rock, pop" -Beyoncé,Drunk in Love (feat. Jay-Z),323480,True,2014,70,0.589,0.621,5,-6.902,0,0.0468,0.00969,0.00104,0.181,0.401,140.03,"pop, R&B" -Maroon 5,Sugar,235493,True,2014,0,0.744,0.783,1,-7.077,1,0.0337,0.0553,0.0,0.086,0.885,120.042,pop -Ella Henderson,Ghost,213213,False,2014,63,0.68,0.84,9,-3.823,1,0.0414,0.0457,8.66e-06,0.143,0.468,104.975,"pop, Dance/Electronic" -Ed Sheeran,"I See Fire - From ""The Hobbit - The Desolation Of Smaug""",300840,False,2013,71,0.581,0.0549,10,-20.514,0,0.0397,0.559,0.0,0.0718,0.234,152.037,pop -Chris Brown,New Flame (feat. Usher & Rick Ross),244226,True,2014,67,0.702,0.629,1,-4.292,1,0.041,0.0506,0.0,0.0963,0.446,141.967,"hip hop, pop, R&B" -Sheppard,Geronimo,218227,False,2014,0,0.707,0.771,7,-6.275,1,0.0783,0.434,0.0013,0.115,0.437,142.016,"pop, rock, Dance/Electronic" -Olly Murs,Wrapped Up (feat. 
Travie McCoy),185640,False,2014,0,0.787,0.848,1,-4.696,1,0.0549,0.0915,0.0,0.15,0.947,121.989,"pop, Dance/Electronic" -Sam Smith,Money On My Mind,192670,False,2014,62,0.688,0.841,2,-5.217,1,0.179,0.204,0.000295,0.229,0.743,133.912,pop -Jason Derulo,Trumpets,217306,False,2013,53,0.627,0.703,0,-4.884,1,0.236,0.563,0.0,0.0962,0.64,81.897,"hip hop, pop" -Nicki Minaj,Bed Of Lies,269946,True,2014,53,0.716,0.67,11,-5.779,1,0.194,0.251,0.0,0.109,0.124,86.01,"hip hop, pop" -Nick Jonas,Jealous - Remix,222213,True,2014,0,0.685,0.709,11,-3.578,0,0.0748,0.0132,0.0,0.452,0.504,93.047,pop -Mark Ronson,Uptown Funk (feat. Bruno Mars),269666,True,2015,82,0.856,0.609,0,-7.223,1,0.0824,0.00801,8.15e-05,0.0344,0.928,114.988,pop -Ed Sheeran,Thinking out Loud,281560,False,2014,81,0.781,0.445,2,-6.061,1,0.0295,0.474,0.0,0.184,0.591,78.998,pop -Wiz Khalifa,See You Again (feat. Charlie Puth),229525,False,2015,81,0.689,0.481,10,-7.503,1,0.0815,0.369,1.03e-06,0.0649,0.283,80.025,"hip hop, pop" -Justin Bieber,What Do You Mean?,205680,False,2015,77,0.845,0.567,5,-8.118,0,0.0956,0.59,0.00142,0.0811,0.793,125.02,pop -Major Lazer,Lean On (feat. MØ & DJ Snake),176561,False,2015,73,0.723,0.809,7,-3.081,0,0.0625,0.00346,0.00123,0.565,0.274,98.007,"hip hop, pop, Dance/Electronic" -The Weeknd,"Earned It (Fifty Shades Of Grey) - From The ""Fifty Shades Of Grey"" Soundtrack",252226,False,2014,76,0.659,0.381,2,-5.922,0,0.0304,0.385,0.0,0.0972,0.426,119.844,"pop, R&B" -Ellie Goulding,"Love Me Like You Do - From ""Fifty Shades Of Grey""",252534,False,2015,78,0.262,0.606,8,-6.646,1,0.0484,0.247,0.0,0.125,0.275,189.857,"pop, rock, Dance/Electronic" -Calvin Harris,Outside (feat. Ellie Goulding),227266,False,2014,78,0.646,0.823,2,-4.123,0,0.0394,0.213,0.0,0.322,0.418,128.035,"hip hop, pop, Dance/Electronic" -David Guetta,"Hey Mama (feat. 
Nicki Minaj, Bebe Rexha & Afrojack)",192560,False,2014,75,0.596,0.73,9,-4.091,1,0.151,0.24,0.0,0.325,0.525,85.979,"hip hop, pop, Dance/Electronic" -Demi Lovato,Cool for the Summer,214746,True,2015,0,0.586,0.613,5,-5.593,0,0.0386,0.00441,0.000183,0.0853,0.319,114.101,pop -Taylor Swift,Blank Space,231826,False,2014,82,0.76,0.703,5,-5.412,1,0.054,0.103,0.0,0.0913,0.57,95.997,pop -The Weeknd,The Hills,242253,True,2015,84,0.585,0.564,0,-7.063,0,0.0515,0.0671,0.0,0.135,0.137,113.003,"pop, R&B" -The Neighbourhood,Daddy Issues,260173,False,2015,85,0.588,0.521,10,-9.461,1,0.0329,0.0678,0.149,0.123,0.337,85.012,"rock, pop" -Deorro,Five More Hours,211975,False,2015,66,0.699,0.883,5,-3.226,0,0.219,0.0288,0.0,0.817,0.499,127.961,"pop, Dance/Electronic" -Avicii,The Nights,176658,False,2014,85,0.527,0.835,6,-5.298,1,0.0433,0.0166,0.0,0.249,0.654,125.983,"pop, Dance/Electronic" -DJ Snake,You Know You Like It,247266,False,2014,69,0.407,0.725,5,-5.346,0,0.188,0.0141,2.46e-06,0.306,0.247,196.093,"hip hop, pop, Dance/Electronic" -Adam Lambert,Ghost Town,208330,True,2015,46,0.703,0.736,4,-5.709,0,0.046,0.186,1.92e-05,0.174,0.274,119.955,pop -Rae Sremmurd,No Type,200080,True,2015,71,0.891,0.486,6,-7.803,1,0.159,0.0158,3.78e-06,0.0925,0.239,125.01,"hip hop, pop" -Taylor Swift,Bad Blood,200106,False,2015,70,0.654,0.655,11,-7.388,0,0.106,0.0294,0.0,0.139,0.221,170.16,pop -Flo Rida,GDFR (feat. Sage the Gemini & Lookas),190185,False,2015,49,0.657,0.827,5,-4.036,1,0.0734,0.000704,0.00534,0.065,0.69,145.889,"hip hop, pop" -Robin Schulz,Sugar (feat. 
Francesco Yates),219043,False,2015,76,0.636,0.815,5,-5.098,0,0.0581,0.0185,0.0,0.163,0.636,123.063,"pop, Dance/Electronic" -Calvin Harris,How Deep Is Your Love,212640,False,2015,76,0.738,0.868,11,-4.373,0,0.0731,0.0392,0.00169,0.388,0.336,122.003,"hip hop, pop, Dance/Electronic" -Philip George,Wish You Were Mine - Radio Edit,177560,False,2014,65,0.654,0.832,7,-4.164,0,0.0293,0.0126,0.0901,0.102,0.407,123.002,"pop, Dance/Electronic" -Trey Songz,Na Na,231906,False,2015,62,0.67,0.476,9,-6.253,0,0.0406,0.328,0.0,0.104,0.235,96.975,"hip hop, pop, R&B" -Fall Out Boy,Centuries,228360,False,2015,80,0.393,0.858,4,-2.868,0,0.0729,0.00359,0.0,0.102,0.56,176.042,rock -Rob $tone,Chill Bill (feat. J. Davi$ & Spooks),177184,True,2016,76,0.886,0.427,6,-10.028,1,0.145,0.0312,0.00099,0.0906,0.23,108.034,hip hop -Lost Frequencies,Are You With Me - Radio Edit,138842,False,2014,33,0.776,0.574,5,-9.882,0,0.0317,0.466,7.83e-05,0.131,0.412,121.03,"pop, Dance/Electronic" -Twenty One Pilots,Stressed Out,202333,False,2015,83,0.734,0.637,4,-5.677,0,0.141,0.0462,2.29e-05,0.0602,0.648,169.977,rock -Fifth Harmony,Worth It (feat. Kid Ink),224573,False,2015,73,0.884,0.765,8,-3.865,1,0.0882,0.063,7.04e-06,0.118,0.594,99.987,pop -KALEO,Way down We Go,219560,False,2015,63,0.59,0.578,10,-5.798,0,0.0528,0.612,0.000162,0.0837,0.264,81.663,"rock, classical" -Sam Smith,Like I Can,167065,False,2014,79,0.656,0.627,7,-6.627,0,0.0379,0.343,2.17e-05,0.124,0.481,99.933,pop -Major Lazer,Light It Up (feat. Nyla & Fuse ODG) - Remix,166138,False,2015,49,0.746,0.877,9,-3.782,0,0.0666,0.0375,0.000833,0.233,0.751,107.985,"hip hop, pop, Dance/Electronic" -Martin Solveig,Intoxicated - New Radio Mix,159693,False,2015,48,0.8,0.677,0,-4.023,1,0.0393,0.00563,0.00409,0.0838,0.547,125.004,"pop, Dance/Electronic" -Avicii,Waiting For Love,230613,False,2015,79,0.579,0.736,6,-3.863,0,0.0527,0.31,0.0,0.198,0.613,127.999,"pop, Dance/Electronic" -Felix Jaehn,Ain't Nobody (Loves Me Better) (feat. 
Jasmine Thompson),186146,False,2018,72,0.778,0.566,2,-6.959,0,0.0311,0.672,0.000197,0.0698,0.479,117.971,"hip hop, pop, Dance/Electronic" -Taylor Swift,Wildest Dreams,220440,False,2014,78,0.553,0.664,8,-7.417,1,0.0741,0.0709,0.0056,0.106,0.467,140.06,pop -The Weeknd,In The Night,235653,True,2015,0,0.48,0.682,7,-4.94,1,0.13,0.0696,0.0,0.0463,0.506,167.939,"pop, R&B" -Pitbull,Time of Our Lives,229360,True,2014,80,0.721,0.802,1,-5.797,1,0.0583,0.0921,0.0,0.694,0.724,124.022,"hip hop, pop, latin" -Galantis,Runaway (U & I),227073,False,2015,74,0.506,0.805,1,-4.119,1,0.0469,0.00711,0.00193,0.0856,0.383,126.008,"pop, Dance/Electronic" -Rihanna,Bitch Better Have My Money,219305,True,2015,73,0.781,0.728,1,-4.981,1,0.0621,0.0509,1.94e-06,0.257,0.395,102.99,"hip hop, pop, R&B" -Zara Larsson,Lush Life,202213,False,2015,0,0.658,0.741,7,-2.86,0,0.0536,0.141,0.0,0.189,0.789,98.024,"pop, Dance/Electronic" -One Direction,Drag Me Down,192120,False,2015,79,0.73,0.703,0,-5.672,0,0.0369,0.109,0.0,0.0657,0.595,138.113,pop -Travis Scott,Antidote,262693,True,2015,73,0.713,0.526,1,-5.046,1,0.032,0.00767,0.000148,0.124,0.131,131.05,"hip hop, Dance/Electronic" -Years & Years,King,215360,False,2015,0,0.559,0.848,4,-4.125,0,0.0388,0.0665,0.0,0.382,0.466,119.977,"pop, Dance/Electronic" -Demi Lovato,Confident,205745,True,2015,64,0.594,0.749,3,-6.251,1,0.0677,0.00188,0.000139,0.0869,0.344,130.064,pop -Lemaitre,Closer,270589,False,2017,59,0.583,0.785,2,-3.991,0,0.0337,0.0012,0.0127,0.124,0.355,91.977,pop -Selena Gomez,Same Old Love,229080,False,2015,69,0.672,0.593,11,-4.01,0,0.0304,0.0223,0.0,0.214,0.438,98.02,pop -Pia Mia,Do It Again,207746,True,2015,72,0.712,0.564,8,-6.527,1,0.047,0.0266,0.0,0.0874,0.411,95.95,"pop, R&B" -Flo Rida,My House,192190,False,2015,76,0.688,0.702,7,-4.792,0,0.0499,0.0215,0.0,0.128,0.74,94.006,"hip hop, pop" -Silentó,Watch Me (Whip / Nae Nae),185131,False,2015,0,0.819,0.768,8,-8.522,1,0.134,0.234,0.0,0.334,0.964,139.982,hip hop -Chris 
Brown,Ayo,225226,True,2015,71,0.823,0.563,6,-5.095,1,0.0443,0.0399,0.0,0.0851,0.682,98.031,"hip hop, pop, R&B" -The Strumbellas,Spirits,203653,False,2016,71,0.556,0.658,0,-6.075,1,0.0265,0.164,0.0,0.113,0.787,80.529,"rock, Folk/Acoustic, pop" -Fetty Wap,Trap Queen,222093,True,2015,68,0.746,0.873,7,-3.803,1,0.128,0.0244,0.0,0.354,0.817,148.075,"hip hop, pop" -Tove Lo,Talking Body,238426,True,2014,66,0.736,0.761,4,-5.449,0,0.0338,0.0966,0.0,0.0823,0.114,119.999,"pop, Dance/Electronic" -Beyoncé,7/11,213506,False,2014,69,0.747,0.705,9,-5.137,0,0.126,0.0128,0.0,0.126,0.56,136.024,"pop, R&B" -Lana Del Rey,High By The Beach,257573,True,2015,67,0.536,0.486,11,-11.067,0,0.0346,0.244,0.00788,0.12,0.0968,131.988,pop -Zara Larsson,Never Forget You,213427,False,2017,73,0.583,0.732,11,-5.728,0,0.0457,0.00312,9.86e-06,0.269,0.276,145.992,"pop, Dance/Electronic" -Jeremih,oui,238320,False,2015,77,0.418,0.724,5,-3.724,1,0.0964,0.213,0.0,0.112,0.604,78.521,"hip hop, pop, R&B" -Drake,Hotline Bling,267066,False,2016,77,0.891,0.628,2,-7.863,1,0.0551,0.00258,0.00019,0.0504,0.552,134.966,"hip hop, pop, R&B" -Selena Gomez,Kill Em With Kindness,217906,False,2015,64,0.757,0.884,10,-5.488,0,0.0404,0.00795,5.87e-05,0.0973,0.398,120.012,pop -Birdy,Let It All Go,280757,False,2015,68,0.383,0.43,10,-8.644,1,0.0302,0.817,1.04e-06,0.0869,0.174,107.005,pop -Fetty Wap,679 (feat. Remy Boyz),196693,True,2015,74,0.618,0.717,7,-5.738,1,0.318,0.00256,0.0,0.625,0.603,190.05,"hip hop, pop" -Ariana Grande,Love Me Harder,236133,False,2014,74,0.472,0.714,1,-4.389,0,0.0334,0.00937,0.0,0.0764,0.24,98.992,pop -The Weeknd,Can't Feel My Face,213520,False,2015,79,0.705,0.769,9,-5.526,0,0.0425,0.113,0.0,0.105,0.583,107.949,"pop, R&B" -SAYGRACE,You Don't Own Me (feat. G-Eazy),199314,False,2015,55,0.336,0.664,7,-5.68,1,0.102,0.166,1.31e-06,0.0575,0.294,186.394,"pop, R&B" -X Ambassadors,Renegades,195200,False,2015,75,0.526,0.862,2,-6.003,1,0.0905,0.0144,0.0597,0.229,0.528,90.052,"hip hop, rock, pop" -R. 
City,Locked Away (feat. Adam Levine),227480,False,2015,77,0.509,0.671,1,-5.709,1,0.0678,0.304,0.0,0.0452,0.55,118.413,pop -Kygo,Stole the Show,223186,False,2016,74,0.64,0.635,8,-7.565,0,0.226,0.271,0.0,0.319,0.475,100.034,"pop, Dance/Electronic" -Tame Impala,The Less I Know The Better,216320,True,2015,83,0.64,0.74,4,-4.083,1,0.0284,0.0115,0.00678,0.167,0.785,116.879,metal -Selena Gomez,Good For You,221560,True,2015,0,0.6,0.676,5,-6.447,0,0.0652,0.154,0.0,0.0741,0.217,88.928,pop -Fetty Wap,My Way (feat. Monty),213053,True,2015,55,0.748,0.741,6,-3.103,1,0.0531,0.00419,0.0,0.147,0.537,128.077,"hip hop, pop" -Hozier,Take Me To Church,241688,False,2014,81,0.566,0.664,4,-5.303,0,0.0464,0.634,0.0,0.116,0.437,128.945,"Folk/Acoustic, pop" -Taylor Swift,Style,231000,False,2014,77,0.588,0.791,7,-5.595,1,0.0402,0.00245,0.00258,0.118,0.487,94.933,pop -Alessia Cara,Here,199453,False,2015,0,0.379,0.799,0,-4.031,1,0.163,0.0804,0.0,0.0793,0.359,120.892,"pop, R&B" -Sam Smith,"Writing's On The Wall - From ""Spectre"" Soundtrack",278987,False,2015,66,0.261,0.302,5,-8.19,0,0.0339,0.769,2.57e-06,0.0863,0.0883,81.418,pop -David Guetta,Bang My Head (feat. Sia & Fetty Wap),193333,False,2015,47,0.599,0.869,0,-3.697,1,0.0789,0.0525,0.00719,0.103,0.593,108.061,"hip hop, pop, Dance/Electronic" -Meek Mill,All Eyes on You (feat. 
Chris Brown & Nicki Minaj),223973,True,2015,68,0.589,0.658,11,-5.288,0,0.203,0.0242,0.0,0.118,0.251,77.521,"hip hop, pop" -Big Sean,I Don't Fuck With You,284386,True,2015,75,0.824,0.733,1,-5.474,1,0.0613,0.0362,0.0,0.325,0.395,97.972,"hip hop, pop" -Kygo,Firestone,271640,False,2016,76,0.704,0.634,11,-7.374,0,0.0428,0.393,3.17e-05,0.0952,0.411,113.927,"pop, Dance/Electronic" -Drake,Jumpman,205879,True,2015,0,0.853,0.558,1,-7.448,1,0.202,0.0462,0.0,0.341,0.654,142.079,"hip hop, pop, R&B" -Petit Biscuit,Sunset Lover,237792,False,2015,71,0.783,0.467,6,-9.474,1,0.0503,0.729,0.809,0.109,0.236,90.838,Dance/Electronic -Shawn Mendes,Stitches,206880,False,2015,78,0.746,0.754,1,-6.684,1,0.0676,0.0152,0.0,0.0486,0.746,149.882,pop -Flo Rida,"I Don't Like It, I Love It (feat. Robin Thicke & Verdine White)",224258,False,2015,67,0.854,0.766,9,-4.697,0,0.141,0.0242,0.0,0.0793,0.784,118.004,"hip hop, pop" -Selena Gomez,The Heart Wants What It Wants,227373,False,2014,64,0.616,0.789,7,-4.874,0,0.0377,0.053,0.0,0.142,0.621,83.066,pop -Jack Ü,Where Are Ü Now (with Justin Bieber),250285,False,2015,74,0.432,0.781,4,-4.038,0,0.0567,0.041,4.21e-06,0.0789,0.197,139.432,"pop, Dance/Electronic" -Ellie Goulding,On My Mind,213445,False,2015,0,0.699,0.688,2,-6.607,0,0.0522,0.264,4.11e-06,0.0863,0.742,154.943,"pop, rock, Dance/Electronic" -Bryson Tiller,Don't,198293,True,2015,78,0.765,0.356,11,-5.556,0,0.195,0.223,0.0,0.0963,0.189,96.991,"hip hop, pop, R&B" -Rich Homie Quan,"Flex (Ooh, Ooh, Ooh)",176674,True,2015,1,0.676,0.615,6,-7.534,0,0.0496,0.0384,0.0,0.341,0.883,163.993,"hip hop, pop" -Joel Adams,Please Don't Go,210580,False,2015,71,0.513,0.768,4,-4.868,0,0.0587,0.0118,1.94e-05,0.294,0.235,84.264,R&B -Zedd,I Want You To Know,238800,False,2015,0,0.561,0.877,9,-2.215,0,0.0659,0.00621,8.11e-06,0.28,0.375,130.003,"hip hop, pop, Dance/Electronic" -Jonas Blue,Fast Car,212424,False,2015,67,0.644,0.57,9,-6.994,1,0.052,0.484,0.0,0.299,0.527,113.945,"pop, Dance/Electronic" -BØRNS,Electric 
Love,218173,False,2015,0,0.373,0.858,6,-6.536,0,0.0889,0.00407,0.0016,0.256,0.605,120.063,"rock, pop" -Sigala,Easy Love,229813,False,2018,66,0.68,0.942,9,-4.208,1,0.0631,0.175,0.0013,0.117,0.647,123.976,"pop, Dance/Electronic" -Justin Bieber,Sorry,200786,False,2015,80,0.654,0.76,0,-3.669,0,0.045,0.0797,0.0,0.299,0.41,99.945,pop -Ariana Grande,One Last Time,197266,False,2014,79,0.628,0.593,8,-5.036,1,0.0323,0.093,1.65e-06,0.096,0.104,125.026,pop -Little Mix,Black Magic,211773,False,2015,1,0.777,0.896,4,-4.467,1,0.0619,0.0352,0.0,0.317,0.843,111.987,pop -One Direction,Perfect,230333,False,2015,77,0.647,0.823,2,-5.231,1,0.0762,0.0598,0.0,0.119,0.396,99.933,pop -Years & Years,Shine,255506,False,2015,55,0.667,0.716,1,-5.21,0,0.0349,0.189,0.0,0.104,0.493,108.002,"pop, Dance/Electronic" -Meghan Trainor,Lips Are Movin,182666,False,2015,70,0.775,0.825,7,-5.402,1,0.0464,0.0506,1.03e-06,0.111,0.95,139.091,pop -Nicky Jam,El Perdón - Forgiveness,206666,False,2017,51,0.736,0.719,8,-3.839,1,0.0357,0.256,0.0,0.0909,0.656,90.013,latin -The Weeknd,Starboy,230453,True,2016,0,0.681,0.594,7,-7.028,1,0.282,0.165,3.49e-06,0.134,0.535,186.054,"pop, R&B" -Drake,One Dance,173986,False,2016,84,0.792,0.625,1,-5.609,1,0.0536,0.00776,0.0018,0.329,0.37,103.967,"hip hop, pop, R&B" -Justin Bieber,Love Yourself,233720,False,2015,0,0.609,0.378,4,-9.828,1,0.438,0.835,0.0,0.28,0.515,100.418,pop -The Chainsmokers,Closer,244960,False,2016,83,0.748,0.524,8,-5.599,1,0.0338,0.414,0.0,0.111,0.661,95.01,"pop, Dance/Electronic" -Adele,Hello,295502,False,2015,1,0.471,0.431,5,-6.129,0,0.0342,0.329,0.0,0.0854,0.289,157.98,"pop, R&B" -Desiigner,Panda,246761,True,2016,70,0.576,0.766,10,-4.943,0,0.449,0.028,1.68e-06,0.366,0.236,144.833,"hip hop, pop" -Burak Yeter,Tuesday (feat. Danelle Sandoval),241874,False,2016,72,0.841,0.639,9,-6.052,0,0.0688,0.0156,0.0654,0.0545,0.675,99.002,Dance/Electronic -Sia,Cheap Thrills (feat. 
Sean Paul),224813,False,2016,67,0.592,0.8,6,-4.931,0,0.215,0.0561,2.01e-06,0.0775,0.728,89.972,pop -OneRepublic,Wherever I Go,169773,False,2016,70,0.552,0.689,10,-6.444,0,0.0425,0.0915,0.0,0.27,0.349,99.961,pop -Rihanna,Work,219320,True,2016,75,0.725,0.534,11,-6.238,1,0.0946,0.0752,0.0,0.0919,0.558,91.974,"hip hop, pop, R&B" -Coldplay,Hymn for the Weekend,258266,False,2015,82,0.491,0.693,0,-6.487,0,0.0377,0.211,6.92e-06,0.325,0.412,90.027,"rock, pop" -Justin Bieber,Sorry,200786,False,2015,80,0.654,0.76,0,-3.669,0,0.045,0.0797,0.0,0.299,0.41,99.945,pop -Justin Timberlake,"CAN'T STOP THE FEELING! (from DreamWorks Animation's ""TROLLS"")",236001,False,2016,76,0.666,0.83,0,-5.715,1,0.0751,0.0123,0.0,0.191,0.702,113.03,pop -WILLOW,Wait a Minute!,196520,False,2015,86,0.764,0.705,3,-5.279,0,0.0278,0.0371,1.94e-05,0.0943,0.672,101.003,"pop, R&B, Dance/Electronic" -Desiigner,Tiimmy Turner,239853,True,2016,67,0.603,0.725,6,-3.054,1,0.0393,0.174,0.0,0.0786,0.304,122.803,"hip hop, pop" -G-Eazy,"Me, Myself & I",251466,True,2015,77,0.756,0.674,0,-6.518,0,0.0959,0.0184,0.0,0.158,0.389,111.995,"hip hop, pop" -David Guetta,This One's for You (feat. Zara Larsson) (Official Song UEFA EURO 2016),207272,False,2016,68,0.367,0.915,9,-3.456,0,0.0488,0.0022,5.81e-05,0.0905,0.365,110.169,"hip hop, pop, Dance/Electronic" -Astrid S,Hurts So Good,208728,False,2016,0,0.672,0.589,7,-5.008,0,0.049,0.082,0.0,0.0962,0.379,120.036,"rock, pop, Dance/Electronic" -Lil Wayne,"Sucker for Pain (with Wiz Khalifa, Imagine Dragons, Logic & Ty Dolla $ign feat. X Ambassadors)",243490,True,2016,75,0.502,0.786,9,-4.378,0,0.317,0.255,0.0,0.65,0.739,169.021,"hip hop, pop" -Twenty One Pilots,Heathens,195920,False,2016,80,0.732,0.396,4,-9.348,0,0.0286,0.0841,3.58e-05,0.105,0.548,90.024,rock -Future,Low Life (feat. 
The Weeknd),313546,True,2016,74,0.722,0.331,8,-7.789,1,0.0726,0.337,0.282,0.146,0.102,143.961,"hip hop, pop" -Galantis,No Money,189126,False,2016,67,0.671,0.916,6,-4.014,0,0.0397,0.0282,0.00762,0.24,0.803,126.01,"pop, Dance/Electronic" -The Chainsmokers,Don't Let Me Down,208373,False,2016,79,0.532,0.869,11,-5.094,1,0.172,0.157,0.00508,0.136,0.422,159.803,"pop, Dance/Electronic" -Tinie Tempah,Girls Like (feat. Zara Larsson),196000,True,2016,62,0.916,0.804,0,-3.406,1,0.049,0.37,1.18e-05,0.0812,0.538,120.028,"hip hop, pop, Dance/Electronic" -Alok,Hear Me Now,192846,False,2016,51,0.788,0.43,11,-7.757,1,0.0419,0.537,0.00261,0.0936,0.461,121.996,"pop, Dance/Electronic" -Jon Bellion,All Time Low,217603,True,2016,70,0.617,0.567,0,-4.188,1,0.0828,0.0584,0.0,0.0933,0.505,90.246,"hip hop, pop" -Cheat Codes,Sex,228361,False,2016,0,0.51,0.692,0,-5.825,1,0.171,0.00451,0.0,0.138,0.209,102.42,"pop, Dance/Electronic" -Meghan Trainor,NO,213506,False,2017,69,0.557,0.803,1,-3.599,0,0.213,0.0124,1.61e-06,0.737,0.657,92.134,pop -Dua Lipa,Hotter Than Hell,187957,False,2017,65,0.532,0.868,3,-4.23,0,0.0908,0.011,0.0,0.0584,0.529,110.127,pop -Martin Garrix,In the Name of Love,195706,False,2016,76,0.501,0.519,4,-5.88,0,0.0409,0.109,0.0,0.454,0.168,133.99,"pop, Dance/Electronic" -Alan Walker,Faded,212106,False,2018,78,0.468,0.627,6,-5.085,1,0.0476,0.0281,7.97e-06,0.11,0.159,179.642,Dance/Electronic -Tove Lo,Cool Girl,197815,True,2016,0,0.674,0.71,9,-7.042,0,0.111,0.343,6.71e-06,0.129,0.234,101.983,"pop, Dance/Electronic" -DJ Snake,Let Me Love You,205946,False,2016,81,0.649,0.716,8,-5.371,1,0.0349,0.0863,2.63e-05,0.135,0.163,99.988,"hip hop, pop, Dance/Electronic" -Calvin Harris,This Is What You Came For (feat. 
Rihanna),222160,False,2016,79,0.631,0.927,9,-2.787,0,0.0332,0.199,0.119,0.148,0.465,123.962,"hip hop, pop, Dance/Electronic" -Mike Posner,I Took A Pill In Ibiza - Seeb Remix,197933,True,2016,77,0.664,0.714,7,-6.645,0,0.111,0.0353,8.42e-06,0.0843,0.71,101.969,"hip hop, pop, Dance/Electronic" -Kevin Gates,2 Phones,240000,True,2016,69,0.895,0.681,7,-5.267,0,0.358,0.157,0.0,0.189,0.554,121.918,"hip hop, pop" -Rihanna,Needed Me,191600,True,2016,80,0.671,0.314,5,-8.091,0,0.244,0.11,0.0,0.0825,0.296,110.898,"hip hop, pop, R&B" -Ariana Grande,Side To Side,226160,True,2016,0,0.648,0.738,6,-5.883,0,0.247,0.0408,0.0,0.292,0.603,159.145,pop -Marshmello,Alone,273802,False,2016,71,0.631,0.953,2,-3.739,1,0.0343,0.0241,0.0155,0.108,0.422,141.99,"pop, Dance/Electronic" -Shawn Mendes,I Know What You Did Last Summer,223853,False,2015,73,0.687,0.761,9,-4.582,0,0.0876,0.102,0.0,0.147,0.743,113.939,pop -Selena Gomez,Same Old Love,229080,False,2015,69,0.672,0.593,11,-4.01,0,0.0304,0.0223,0.0,0.214,0.438,98.02,pop -Calvin Harris,My Way,219159,False,2016,74,0.818,0.913,4,-3.06,0,0.0426,0.093,3.69e-05,0.161,0.536,119.986,"hip hop, pop, Dance/Electronic" -Fitz and The Tantrums,HandClap,193253,False,2016,64,0.636,0.836,8,-3.004,1,0.0427,0.00609,0.000157,0.0828,0.715,139.956,"pop, rock" -Twenty One Pilots,Ride,214506,False,2015,80,0.645,0.713,6,-5.355,1,0.0393,0.00835,0.0,0.113,0.566,74.989,rock -Flo Rida,My House,192190,False,2015,76,0.688,0.702,7,-4.792,0,0.0499,0.0215,0.0,0.128,0.74,94.006,"hip hop, pop" -Ariana Grande,Dangerous Woman,235946,False,2016,0,0.677,0.604,7,-5.32,1,0.0385,0.0612,0.0,0.353,0.297,134.052,pop -Anne-Marie,Alarm,205593,True,2016,57,0.756,0.589,0,-5.093,1,0.232,0.0812,0.0,0.176,0.811,146.928,"pop, Dance/Electronic" -Charlie Puth,We Don't Talk Anymore (feat. 
Selena Gomez),217706,False,2016,79,0.728,0.563,1,-8.053,0,0.134,0.621,0.0,0.179,0.352,100.017,pop -AlunaGeorge,I'm In Control,209425,False,2016,0,0.734,0.847,11,-3.713,0,0.0346,0.355,0.000144,0.154,0.673,104.013,"pop, Dance/Electronic" -Enrique Iglesias,DUELE EL CORAZON (feat. Wisin),199693,False,2016,68,0.716,0.908,8,-3.254,0,0.103,0.0858,0.0,0.135,0.869,91.03,"pop, latin" -MNEK,Never Forget You,213427,False,2015,45,0.583,0.732,11,-5.728,0,0.0457,0.00312,9.86e-06,0.269,0.276,145.992,Dance/Electronic -Mike Perry,The Ocean (feat. Shy Martin),183414,False,2016,70,0.632,0.575,5,-6.478,1,0.029,0.0225,1.81e-06,0.104,0.188,90.037,"pop, Dance/Electronic" -Drake,Hotline Bling,267066,False,2016,0,0.903,0.62,2,-8.094,1,0.0587,0.00347,0.000119,0.0504,0.539,134.96,"hip hop, pop, R&B" -Ariana Grande,Into You,244453,False,2016,3,0.623,0.734,9,-5.948,1,0.107,0.0162,1.75e-06,0.145,0.37,107.853,pop -Shawn Mendes,Treat You Better,187973,False,2017,82,0.444,0.819,10,-4.078,0,0.341,0.106,0.0,0.107,0.747,82.695,pop -Tory Lanez,Say It,237786,True,2016,73,0.546,0.529,11,-10.511,0,0.0583,0.0862,6.5e-06,0.124,0.247,107.331,"hip hop, pop, R&B" -Coldplay,Adventure of a Lifetime,263786,False,2015,76,0.638,0.924,7,-3.887,1,0.036,0.00205,0.000175,0.149,0.53,111.995,"rock, pop" -Kungs,This Girl - Kungs Vs. Cookin' On 3 Burners,195561,False,2016,0,0.79,0.705,0,-4.684,0,0.0383,0.0807,4.81e-05,0.251,0.501,121.969,"pop, Dance/Electronic" -SAYGRACE,You Don't Own Me (feat. G-Eazy),201493,False,2016,69,0.332,0.635,7,-5.653,1,0.0898,0.159,2.79e-06,0.0599,0.261,186.249,"pop, R&B" -X Ambassadors,Renegades,195200,False,2015,75,0.526,0.862,2,-6.003,1,0.0905,0.0144,0.0597,0.229,0.528,90.052,"hip hop, rock, pop" -DNCE,Cake By The Ocean,219146,True,2016,79,0.774,0.753,4,-5.446,0,0.0517,0.152,0.0,0.0371,0.896,119.002,pop -99 Souls,The Girl Is Mine,216613,False,2016,55,0.683,0.943,9,-3.6,1,0.0397,0.00423,0.0972,0.0356,0.706,118.991,Dance/Electronic -Illy,Papercuts (feat. 
Vera Blue),255889,False,2016,51,0.369,0.618,6,-6.304,0,0.249,0.161,0.0,0.257,0.467,191.863,"hip hop, pop, Dance/Electronic" -Kiiara,Gold,225882,False,2015,55,0.6,0.412,8,-9.343,1,0.344,0.615,0.0025,0.134,0.408,113.049,"rock, pop, Dance/Electronic" -Kygo,Stole the Show,222813,False,2016,0,0.678,0.633,8,-6.443,0,0.171,0.151,0.0,0.147,0.478,99.886,"pop, Dance/Electronic" -Fifth Harmony,Work from Home (feat. Ty Dolla $ign),214480,False,2016,75,0.803,0.585,8,-5.861,1,0.0432,0.103,3.94e-06,0.0644,0.593,105.017,pop -Drake,Pop Style,212946,True,2016,63,0.713,0.462,7,-10.027,1,0.123,0.192,2.16e-06,0.105,0.0594,133.053,"hip hop, pop, R&B" -ZAYN,PILLOWTALK,203686,True,2016,59,0.588,0.702,11,-4.271,1,0.0496,0.104,0.0,0.089,0.429,124.909,"pop, Dance/Electronic" -Alessia Cara,Here,199453,False,2016,66,0.376,0.822,0,-3.974,1,0.104,0.0783,0.0,0.0841,0.327,120.493,"pop, R&B" -DJ Snake,Middle,220573,False,2016,0,0.611,0.7,9,-5.331,1,0.0436,0.0199,0.0,0.0549,0.213,104.981,"hip hop, pop, Dance/Electronic" -Sigala,Sweet Lovin' - Radio Edit,202149,False,2015,46,0.683,0.91,10,-1.231,1,0.0515,0.0553,4.78e-06,0.336,0.674,124.977,"pop, Dance/Electronic" -Drake,Jumpman,205879,True,2015,72,0.852,0.553,1,-7.286,1,0.187,0.0559,0.0,0.332,0.656,142.079,"hip hop, pop, R&B" -Charlie Puth,Dangerously,199133,False,2016,62,0.696,0.517,2,-8.379,0,0.0366,0.364,0.0,0.197,0.23,112.291,pop -Nick Jonas,Close,234213,False,2016,69,0.654,0.623,6,-5.273,0,0.082,0.253,0.0,0.144,0.401,123.996,pop -Jonas Blue,Perfect Strangers,196613,False,2016,0,0.742,0.819,1,-5.307,1,0.0487,0.372,0.0,0.277,0.709,117.986,"pop, Dance/Electronic" -Major Lazer,Cold Water (feat. 
Justin Bieber & MØ),185351,False,2016,0,0.608,0.798,6,-5.092,0,0.0432,0.0736,0.0,0.156,0.501,92.943,"hip hop, pop, Dance/Electronic" -The Chainsmokers,Roses,226738,False,2015,73,0.713,0.802,4,-7.055,1,0.0561,0.0435,0.00377,0.309,0.343,100.001,"pop, Dance/Electronic" -Tory Lanez,LUV,228640,True,2016,0,0.688,0.541,11,-8.128,0,0.114,0.0118,0.0,0.123,0.247,95.109,"hip hop, pop, R&B" -P!nk,"Just Like Fire (From the Original Motion Picture ""Alice Through The Looking Glass"")",215413,False,2016,68,0.632,0.702,7,-5.92,1,0.148,0.0114,0.0,0.108,0.523,162.958,pop -Bryson Tiller,Don't,198293,True,2015,78,0.765,0.356,11,-5.556,0,0.195,0.223,0.0,0.0963,0.189,96.991,"hip hop, pop, R&B" -Steve Aoki,Just Hold On,198774,False,2016,63,0.647,0.932,11,-3.515,1,0.0824,0.00383,1.5e-06,0.0574,0.374,114.991,"hip hop, pop, Dance/Electronic" -Clean Bandit,Tears (feat. Louisa Johnson),225914,False,2016,60,0.605,0.77,5,-3.645,0,0.0446,0.0431,0.0,0.159,0.298,130.037,"pop, Dance/Electronic" -Jonas Blue,Fast Car,212424,False,2015,1,0.459,0.587,9,-6.983,1,0.0785,0.453,0.0,0.307,0.581,113.901,"pop, Dance/Electronic" -Flume,Never Be Like You,233337,True,2016,0,0.443,0.558,0,-5.436,1,0.0624,0.441,0.0,0.163,0.248,116.838,Dance/Electronic -Bastille,Good Grief,206493,False,2016,65,0.73,0.758,1,-4.888,1,0.0653,0.147,0.0,0.311,0.877,120.041,"pop, rock" -Sia,The Greatest (feat. Kendrick Lamar),210226,False,2016,68,0.668,0.725,1,-6.127,1,0.266,0.0102,0.000479,0.0561,0.729,191.944,pop -PARTYNEXTDOOR,Not Nice,202661,False,2016,62,0.598,0.496,0,-9.309,1,0.0804,0.0253,0.00132,0.12,0.725,128.06,"hip hop, pop, R&B" -Garrett Nash,"i hate u, i love u (feat. olivia o'brien)",251033,True,2016,77,0.492,0.275,6,-13.4,0,0.3,0.687,0.0,0.101,0.18,92.6,"hip hop, pop" -Hilltop Hoods,1955,239280,False,2016,63,0.766,0.5,11,-7.558,1,0.0695,0.719,5.91e-06,0.304,0.71,84.044,"hip hop, pop" -Fifth Harmony,All In My Head (Flex) (feat. 
Fetty Wap),210573,False,2016,62,0.689,0.791,0,-5.194,0,0.053,0.023,0.0,0.0526,0.755,95.04,pop -Daya,"Sit Still, Look Pretty",202221,False,2016,1,0.657,0.739,2,-4.081,1,0.274,0.141,0.0,0.178,0.543,181.994,"rock, pop, Dance/Electronic" -Kent Jones,Don't Mind,198236,True,2016,63,0.464,0.771,8,-4.503,1,0.336,0.0235,0.0,0.063,0.69,158.777,hip hop -Flume,Say It,262521,True,2016,53,0.581,0.531,3,-6.829,0,0.0322,0.0731,3.27e-06,0.0616,0.273,75.255,Dance/Electronic -Bryson Tiller,Exchange,194613,True,2015,76,0.525,0.433,6,-10.598,1,0.185,0.107,0.0,0.135,0.276,160.108,"hip hop, pop, R&B" -MØ,Final Song,235826,False,2016,66,0.695,0.672,1,-6.109,0,0.0345,0.014,7.95e-05,0.0756,0.245,104.988,"pop, Dance/Electronic" -Beyoncé,Sorry,232560,True,2016,67,0.775,0.598,2,-7.274,1,0.0535,0.00175,4.44e-06,0.253,0.356,129.988,"pop, R&B" -Little Mix,Shout Out to My Ex,246240,False,2017,1,0.773,0.747,5,-4.061,1,0.0889,0.0239,8.23e-06,0.11,0.8,126.014,pop -The Lumineers,Ophelia,160097,False,2016,0,0.664,0.576,5,-6.429,1,0.0286,0.63,0.000198,0.0902,0.621,76.026,"pop, Folk/Acoustic" -Frank Ocean,Pink + White,184516,False,2016,79,0.544,0.552,9,-7.45,1,0.0991,0.67,4.57e-05,0.415,0.554,159.738,"hip hop, pop, R&B" -Drake,Too Good,263373,True,2016,74,0.794,0.653,7,-7.839,1,0.104,0.0489,4.88e-05,0.1,0.397,117.996,"hip hop, pop, R&B" -Ed Sheeran,Shape of You,233712,False,2017,84,0.825,0.652,1,-3.183,0,0.0802,0.581,0.0,0.0931,0.931,95.977,pop -Sean Paul,No Lie,221176,False,2018,84,0.742,0.882,7,-2.862,1,0.117,0.0466,0.0,0.206,0.463,102.04,"hip hop, pop" -Luis Fonsi,Despacito (Featuring Daddy Yankee),228200,False,2017,0,0.66,0.786,2,-4.757,1,0.17,0.209,0.0,0.112,0.846,177.833,"pop, latin" -Sam Smith,Too Good At Goodbyes,201000,False,2017,79,0.681,0.372,5,-8.237,1,0.0432,0.64,0.0,0.169,0.476,91.873,pop -Clean Bandit,Rockabye (feat. Sean Paul & Anne-Marie),251088,False,2016,75,0.72,0.763,9,-4.068,0,0.0523,0.406,0.0,0.18,0.742,101.965,"pop, Dance/Electronic" -Camila Cabello,Havana (feat. 
Young Thug),216896,False,2017,1,0.768,0.517,7,-4.323,0,0.0312,0.186,3.8e-05,0.104,0.418,104.992,pop -Imagine Dragons,Thunder,187146,False,2017,1,0.6,0.81,0,-4.749,1,0.0479,0.00683,0.21,0.155,0.298,167.88,rock -Maroon 5,Don't Wanna Know (feat. Kendrick Lamar),214265,False,2018,68,0.783,0.61,7,-6.124,1,0.0696,0.343,0.0,0.0983,0.418,100.047,pop -The Chainsmokers,Something Just Like This,247160,False,2017,83,0.617,0.635,11,-6.769,0,0.0317,0.0498,1.44e-05,0.164,0.446,103.019,"pop, Dance/Electronic" -Ed Sheeran,Perfect,263400,False,2017,85,0.599,0.448,8,-6.312,1,0.0232,0.163,0.0,0.106,0.168,95.05,pop -MGMT,Little Dark Age,299960,False,2018,81,0.705,0.712,6,-6.156,1,0.0385,0.0102,0.000855,0.1,0.62,97.512,rock -Travis Scott,goosebumps,243836,True,2016,83,0.841,0.728,7,-3.37,1,0.0484,0.0847,0.0,0.149,0.43,130.049,"hip hop, Dance/Electronic" -Billie Eilish,Bored,180933,False,2017,84,0.614,0.318,7,-12.695,1,0.0478,0.896,0.00239,0.0795,0.112,119.959,"pop, Dance/Electronic" -Future,Mask Off,204600,True,2017,79,0.833,0.434,2,-8.795,1,0.431,0.0102,0.0219,0.165,0.281,150.062,"hip hop, pop" -G-Eazy,Him & I,268866,True,2017,0,0.589,0.731,2,-6.343,1,0.0868,0.0534,0.0,0.308,0.191,87.908,"hip hop, pop" -Maroon 5,Cold (feat. 
Future),234308,True,2017,0,0.697,0.716,9,-6.288,0,0.113,0.118,0.0,0.0424,0.506,99.905,pop -Rae Sremmurd,Swang,208120,True,2016,75,0.681,0.314,8,-9.319,1,0.0581,0.2,9.82e-06,0.1,0.166,139.992,"hip hop, pop" -Lil Peep,Save That Shit,231546,True,2017,77,0.534,0.583,2,-8.672,1,0.0288,0.0262,0.0,0.421,0.145,105.997,hip hop -A$AP Ferg,Plain Jane,173600,True,2017,74,0.797,0.844,11,-5.482,1,0.275,0.0651,0.0,0.087,0.52,170.142,"hip hop, pop" -French Montana,Unforgettable,233901,True,2017,82,0.726,0.769,6,-5.043,1,0.123,0.0293,0.0101,0.104,0.733,97.985,"hip hop, pop" -21 Savage,Bank Account,220306,True,2017,74,0.884,0.347,8,-8.227,0,0.35,0.015,7e-06,0.0871,0.376,75.016,hip hop -Axwell /\ Ingrosso,More Than You Know,203000,False,2017,0,0.645,0.741,5,-4.989,0,0.0339,0.0323,0.0,0.29,0.534,123.07,"pop, Dance/Electronic" -ZAYN,Dusk Till Dawn (feat. Sia) - Radio Edit,239000,False,2017,77,0.259,0.437,11,-6.589,0,0.0386,0.102,1.32e-06,0.106,0.0951,180.042,"pop, Dance/Electronic" -J Balvin,Mi Gente,189029,False,2017,53,0.548,0.701,11,-4.862,0,0.0914,0.0178,1.34e-05,0.134,0.309,104.237,latin -Jax Jones,You Don't Know Me - Radio Edit,213946,True,2016,1,0.876,0.669,11,-6.054,0,0.138,0.163,0.0,0.185,0.682,124.007,"hip hop, pop, Dance/Electronic" -Rae Sremmurd,Black Beatles,291893,True,2016,75,0.794,0.632,0,-6.163,1,0.0649,0.142,0.0,0.128,0.355,145.926,"hip hop, pop" -Zara Larsson,Ain't My Fault,224030,False,2017,67,0.576,0.782,6,-4.825,0,0.0296,0.00778,0.0,0.285,0.355,141.153,"pop, Dance/Electronic" -Travis Scott,BUTTERFLY EFFECT,190677,False,2017,0,0.763,0.598,11,-6.865,1,0.0539,0.0714,0.0,0.112,0.182,140.987,"hip hop, Dance/Electronic" -Ayo & Teo,Rolex,238586,False,2017,69,0.804,0.886,1,-2.512,1,0.04,0.0837,0.0,0.266,0.789,144.946,"hip hop, pop" -Lil Uzi Vert,XO Tour Llif3,182706,True,2017,81,0.732,0.75,11,-6.366,0,0.231,0.00264,0.0,0.109,0.401,155.096,hip hop -Imagine Dragons,Whatever It Takes,201240,False,2017,80,0.672,0.655,10,-5.021,0,0.0311,0.0362,0.0,0.117,0.556,134.945,rock -Rob 
$tone,Chill Bill (feat. J. Davi$ & Spooks),177184,True,2016,76,0.886,0.427,6,-10.028,1,0.145,0.0312,0.00099,0.0906,0.23,108.034,hip hop -Jason Derulo,Swalla (feat. Nicki Minaj & Ty Dolla $ign),216408,True,2017,75,0.696,0.817,1,-3.862,1,0.109,0.075,0.0,0.187,0.782,98.064,"hip hop, pop" -G-Eazy,No Limit,245386,True,2017,0,0.838,0.771,1,-3.791,1,0.244,0.0117,0.0,0.0853,0.405,175.957,"hip hop, pop" -Playboi Carti,Magnolia,181812,True,2017,77,0.791,0.582,11,-7.323,0,0.286,0.0114,0.0,0.35,0.443,162.991,hip hop -Chord Overstreet,Hold On,198853,False,2017,80,0.618,0.443,2,-9.681,1,0.0526,0.469,0.0,0.0829,0.167,119.949,"pop, Folk/Acoustic" -Jax Jones,Breathe,207629,False,2017,58,0.722,0.744,11,-5.52,0,0.0363,0.0234,0.000157,0.143,0.686,125.985,"hip hop, pop, Dance/Electronic" -Imagine Dragons,Believer,204346,False,2017,1,0.779,0.787,10,-4.305,0,0.108,0.0524,0.0,0.14,0.708,124.982,rock -Natti Natasha,Criminal,232549,False,2017,74,0.814,0.813,2,-3.023,0,0.0561,0.03,9.33e-05,0.255,0.839,79.997,"pop, latin" -Kendrick Lamar,HUMBLE.,177000,True,2017,0,0.906,0.625,1,-6.779,0,0.0903,0.000243,3.23e-05,0.0975,0.423,150.018,hip hop -Selena Gomez,Fetish (feat. 
Gucci Mane),186112,False,2017,69,0.708,0.618,2,-4.424,1,0.0592,0.0204,6.81e-06,0.062,0.265,123.013,pop -Charlie Puth,Attention,211475,False,2017,0,0.774,0.626,3,-4.432,0,0.0432,0.0969,3.12e-05,0.0848,0.777,100.041,pop -Big Shaq,Man's Not Hot,186026,False,2017,62,0.905,0.884,0,-4.076,1,0.236,0.111,8.31e-05,0.107,0.588,135.048,"pop, Dance/Electronic" -Anne-Marie,Ciao Adios,200104,False,2017,57,0.698,0.882,4,-3.078,0,0.0863,0.127,0.0,0.15,0.445,106.083,"pop, Dance/Electronic" -Rag'n'Bone Man,Human,200186,False,2017,76,0.602,0.707,9,-4.097,1,0.302,0.393,0.0,0.165,0.554,75.087,R&B -blackbear,do re mi,212027,True,2017,4,0.745,0.593,8,-6.35,1,0.0526,0.00522,5.25e-06,0.123,0.17,111.002,"hip hop, pop, Dance/Electronic" -Lil Pump,Gucci Gang,124055,True,2017,64,0.936,0.523,5,-6.71,1,0.0597,0.239,0.0,0.117,0.699,119.889,hip hop -Taylor Swift,Look What You Made Me Do,211853,False,2017,76,0.766,0.709,9,-6.471,0,0.123,0.204,1.41e-05,0.126,0.506,128.07,pop -Enrique Iglesias,SUBEME LA RADIO (feat. Descemer Bueno & Zion & Lennox),207680,False,2017,70,0.688,0.822,0,-3.304,1,0.0537,0.0642,0.0,0.241,0.66,91.011,"pop, latin" -Kodak Black,Tunnel Vision,268186,True,2017,51,0.497,0.489,11,-7.724,0,0.294,0.0576,9.91e-05,0.122,0.231,171.853,hip hop -Kendrick Lamar,DNA.,185946,True,2017,0,0.636,0.517,0,-6.759,1,0.36,0.00402,0.0,0.0874,0.394,139.928,hip hop -A Boogie Wit da Hoodie,Drowning (feat. Kodak Black),209269,True,2017,0,0.839,0.81,5,-5.274,0,0.0568,0.501,0.0,0.117,0.814,129.014,hip hop -XXXTENTACION,Jocelyn Flores,119133,True,2017,83,0.872,0.391,0,-9.144,0,0.242,0.469,4.13e-06,0.297,0.437,134.021,hip hop -Migos,Bad and Boujee (feat. Lil Uzi Vert),343150,True,2017,72,0.926,0.666,11,-5.314,1,0.244,0.0611,0.0,0.123,0.168,127.079,"hip hop, pop" -Yo Gotti,Rake It Up (feat. 
Nicki Minaj),276333,True,2017,67,0.91,0.444,1,-8.126,0,0.344,0.022,0.0,0.137,0.53,149.953,"hip hop, pop" -Martin Garrix,Scared to Be Lonely,220883,False,2017,77,0.584,0.54,1,-7.786,0,0.0576,0.0895,0.0,0.261,0.195,137.972,"pop, Dance/Electronic" -Jonas Blue,Mama,181614,False,2017,51,0.739,0.792,11,-4.256,0,0.0418,0.091,0.0,0.0516,0.547,104.016,"pop, Dance/Electronic" -Martin Jensen,Solo Dance,174933,False,2016,0,0.744,0.836,6,-2.396,0,0.0507,0.0435,0.0,0.194,0.36,114.965,"pop, Dance/Electronic" -Taylor Swift,...Ready For It?,208186,False,2017,73,0.613,0.764,2,-6.509,1,0.136,0.0527,0.0,0.197,0.417,160.015,pop -David Guetta,2U (feat. Justin Bieber),194896,False,2017,67,0.548,0.65,8,-5.827,0,0.0591,0.219,0.0,0.225,0.557,144.937,"hip hop, pop, Dance/Electronic" -Migos,Slippery (feat. Gucci Mane),304041,True,2017,68,0.92,0.675,1,-5.661,0,0.263,0.307,0.0,0.104,0.749,141.967,"hip hop, pop" -Shawn Mendes,There's Nothing Holdin' Me Back,199440,False,2017,0,0.857,0.8,2,-4.035,1,0.0583,0.381,0.0,0.0913,0.966,121.996,pop -G-Eazy,Good Life (with G-Eazy & Kehlani),225520,False,2017,63,0.572,0.778,1,-5.208,1,0.233,0.00638,0.0,0.0563,0.54,168.073,"hip hop, pop" -The Vamps,All Night,197640,False,2017,73,0.538,0.804,8,-5.194,1,0.0358,0.0041,0.0,0.33,0.507,144.992,pop -Dua Lipa,New Rules,212000,False,2017,61,0.763,0.72,9,-5.181,0,0.0691,0.00261,1.11e-05,0.114,0.592,116.01,pop -Cardi B,Bodak Yellow,223962,True,2017,59,0.929,0.723,11,-5.792,0,0.109,0.0672,0.0,0.346,0.458,125.022,"hip hop, pop" -Rag'n'Bone Man,Skin,239626,False,2017,67,0.564,0.745,0,-7.733,1,0.31,0.265,0.0,0.147,0.351,170.661,R&B -Zara Larsson,I Would Like,226720,False,2017,57,0.486,0.713,2,-3.949,0,0.0524,0.0853,0.0,0.0839,0.297,121.028,"pop, Dance/Electronic" -DJ Khaled,Wild Thoughts (feat. 
Rihanna & Bryson Tiller),204173,True,2017,72,0.671,0.672,0,-3.094,0,0.0688,0.0329,0.0,0.118,0.632,97.979,"hip hop, pop" -Post Malone,Congratulations,220293,True,2016,81,0.63,0.804,6,-4.183,1,0.0363,0.215,0.0,0.253,0.492,123.146,hip hop -Avicii,Without You (feat. Sandro Cavazza),181672,True,2017,77,0.662,0.858,2,-4.844,1,0.0428,0.00163,0.0,0.0456,0.295,133.993,"pop, Dance/Electronic" -Clean Bandit,Symphony (feat. Zara Larsson),214866,False,2017,0,0.718,0.609,0,-4.699,0,0.043,0.281,3.29e-05,0.234,0.497,122.95,"pop, Dance/Electronic" -Katy Perry,Chained To The Rhythm,237733,False,2017,69,0.562,0.8,0,-5.404,1,0.112,0.0814,0.0,0.199,0.471,95.029,pop -Zay Hilfigerrr,Juju on That Beat (TZ Anthem),144244,False,2016,57,0.807,0.887,1,-3.892,1,0.275,0.00381,0.0,0.391,0.78,160.517,set() -Calvin Harris,"Feels (feat. Pharrell Williams, Katy Perry & Big Sean)",223413,True,2017,79,0.893,0.745,11,-3.105,0,0.0571,0.0642,0.0,0.0943,0.872,101.018,"hip hop, pop, Dance/Electronic" -Migos,T-Shirt,242407,True,2017,64,0.865,0.687,10,-3.744,0,0.217,0.242,0.0,0.158,0.486,139.023,"hip hop, pop" -OneRepublic,Rich Love (with Seeb),201256,False,2017,64,0.401,0.872,1,-3.641,0,0.314,0.167,0.0,0.362,0.472,104.592,pop -Maggie Lindemann,Pretty Girl - Cheat Codes X CADE Remix,193613,True,2017,61,0.703,0.868,7,-4.661,0,0.0291,0.15,0.132,0.104,0.733,121.03,"rock, pop" -Macklemore,Glorious (feat. 
Skylar Grey),220454,True,2017,74,0.731,0.794,0,-5.126,0,0.0522,0.0323,2.59e-05,0.112,0.356,139.994,"hip hop, pop" -Hayden James,NUMB,217296,True,2019,47,0.617,0.558,10,-7.046,0,0.0431,0.184,1.03e-06,0.0911,0.4,147.932,"pop, Dance/Electronic" -Liam Payne,Strip That Down,204502,False,2017,1,0.869,0.485,6,-5.595,1,0.0545,0.246,0.0,0.0765,0.527,106.028,pop -Drake,Passionfruit,298940,True,2017,0,0.809,0.463,11,-11.377,1,0.0396,0.256,0.085,0.109,0.364,111.98,"hip hop, pop, R&B" -Justin Bieber,Friends (with BloodPop®),189466,False,2017,0,0.744,0.739,8,-5.35,1,0.0387,0.00459,0.0,0.306,0.649,104.99,pop -ZAYN,"I Don’t Wanna Live Forever (Fifty Shades Darker) - From ""Fifty Shades Darker (Original Motion Picture Soundtrack)""",245200,False,2016,0,0.735,0.451,0,-8.374,1,0.0585,0.0631,1.3e-05,0.325,0.0862,117.973,"pop, Dance/Electronic" -Alessia Cara,Scars To Your Beautiful,230226,False,2016,76,0.319,0.739,0,-5.74,1,0.272,0.0285,0.0,0.111,0.449,194.169,"pop, R&B" -2 Chainz,It's A Vibe,210200,True,2017,71,0.822,0.502,7,-7.38,1,0.148,0.0312,0.000887,0.114,0.525,73.003,"hip hop, pop" -Starley,Call on Me - Ryan Riback Remix,222040,False,2016,48,0.67,0.838,0,-4.031,1,0.0362,0.0605,0.000611,0.159,0.716,105.0,"pop, Dance/Electronic" -AJR,Weak,201160,False,2017,0,0.673,0.637,5,-4.518,1,0.0429,0.137,0.0,0.184,0.678,123.98,rock -Lana Del Rey,Lust for Life (with The Weeknd),264011,False,2017,0,0.499,0.676,0,-8.618,0,0.0582,0.588,0.00484,0.358,0.295,99.979,pop -CNCO,Reggaetón Lento (Remix),188786,False,2017,0,0.623,0.909,4,-3.079,0,0.0758,0.158,0.0,0.0972,0.651,93.984,"pop, latin" -The Vamps,Middle Of The Night,174600,False,2017,61,0.588,0.749,11,-4.323,0,0.058,0.0037,0.0,0.0813,0.397,130.093,pop -The Weeknd,I Feel It Coming,269186,False,2016,78,0.773,0.819,0,-5.946,0,0.118,0.428,0.0,0.0679,0.585,92.987,"pop, R&B" -The Chainsmokers,Paris,221506,False,2017,73,0.653,0.658,2,-6.428,1,0.0304,0.0215,1.66e-06,0.0939,0.219,99.99,"pop, Dance/Electronic" -Big Sean,Bounce 
Back,222360,True,2017,71,0.78,0.575,1,-5.628,0,0.139,0.106,0.0,0.129,0.273,81.502,"hip hop, pop" -Shawn Mendes,Mercy,208733,False,2017,0,0.561,0.674,4,-4.882,0,0.0818,0.118,0.0,0.111,0.383,148.127,pop -Avicii,Lonely Together (feat. Rita Ora),181812,False,2017,53,0.655,0.666,5,-5.309,1,0.0612,0.134,2.12e-05,0.0683,0.272,102.977,"pop, Dance/Electronic" -Cheat Codes,No Promises (feat. Demi Lovato),223503,False,2017,68,0.741,0.667,10,-5.445,1,0.134,0.0575,0.0,0.106,0.595,112.956,"pop, Dance/Electronic" -Bruno Mars,24K Magic,225983,False,2016,78,0.818,0.803,1,-4.282,1,0.0797,0.034,0.0,0.153,0.632,106.97,pop -Lorde,Homemade Dynamite - REMIX,214254,True,2017,8,0.781,0.548,0,-4.997,0,0.0764,0.229,0.0,0.127,0.175,106.996,"pop, Dance/Electronic" -Kygo,It Ain't Me (with Selena Gomez),220780,False,2017,75,0.64,0.533,0,-6.596,1,0.0706,0.119,0.0,0.0864,0.515,99.968,"pop, Dance/Electronic" -Drake,God's Plan,198973,True,2018,81,0.754,0.449,7,-9.211,1,0.109,0.0332,8.29e-05,0.552,0.357,77.169,"hip hop, pop, R&B" -Post Malone,rockstar (feat. 
21 Savage),218146,True,2018,83,0.585,0.52,5,-6.136,0,0.0712,0.124,7.01e-05,0.131,0.129,159.801,hip hop -Offset,Ric Flair Drip (with Metro Boomin),172800,True,2017,80,0.88,0.428,9,-8.28,1,0.206,0.149,5.05e-05,0.114,0.333,100.007,"hip hop, pop" -G-Eazy,Him & I (with Halsey),268866,True,2017,75,0.589,0.731,2,-6.343,1,0.0868,0.0534,0.0,0.308,0.191,87.908,"hip hop, pop" -Dynoro,In My Mind,184560,False,2018,78,0.694,0.77,6,-5.335,1,0.149,0.176,1.1e-05,0.118,0.163,125.905,"pop, Dance/Electronic" -Juice WRLD,Lucid Dreams,239835,True,2018,84,0.511,0.566,6,-7.23,0,0.2,0.349,0.0,0.34,0.218,83.903,hip hop -Zedd,The Middle,184732,False,2018,80,0.753,0.657,7,-3.061,1,0.0449,0.171,0.0,0.112,0.437,107.01,"hip hop, pop, Dance/Electronic" -Marshmello,FRIENDS,202620,True,2018,78,0.626,0.88,9,-2.384,0,0.0504,0.205,0.0,0.128,0.534,95.079,"pop, Dance/Electronic" -Cardi B,I Like It,253390,True,2018,79,0.816,0.726,5,-3.998,0,0.129,0.099,0.0,0.372,0.65,136.048,"hip hop, pop" -XXXTENTACION,Moonlight,135090,True,2018,82,0.921,0.537,9,-5.723,0,0.0804,0.556,0.00404,0.102,0.711,128.009,hip hop -Ariana Grande,no tears left to cry,205920,False,2018,77,0.699,0.713,9,-5.507,0,0.0594,0.04,3.11e-06,0.294,0.354,121.993,pop -Sheck Wes,Mo Bamba,183906,True,2018,77,0.729,0.625,4,-5.266,1,0.0315,0.194,0.00986,0.248,0.261,146.034,hip hop -Calvin Harris,One Kiss (with Dua Lipa),214846,False,2018,81,0.791,0.862,9,-3.24,0,0.11,0.037,2.19e-05,0.0814,0.592,123.994,"hip hop, pop, Dance/Electronic" -Tyga,Taste (feat. 
Offset),232959,True,2018,73,0.884,0.559,0,-7.442,1,0.12,0.0236,0.0,0.101,0.342,97.994,"hip hop, pop" -Travis Scott,SICKO MODE,312820,True,2018,81,0.834,0.73,8,-3.714,1,0.222,0.00513,0.0,0.124,0.446,155.008,"hip hop, Dance/Electronic" -Imagine Dragons,Natural,189466,False,2018,80,0.704,0.611,2,-6.112,1,0.0409,0.217,0.0,0.0812,0.22,100.0,rock -DJ Snake,"Taki Taki (with Selena Gomez, Ozuna & Cardi B)",212500,True,2018,76,0.842,0.801,8,-4.167,0,0.228,0.157,4.82e-06,0.0642,0.617,95.881,"hip hop, pop, Dance/Electronic" -Marshmello,Spotlight,177600,False,2018,71,0.546,0.822,7,-4.483,1,0.151,0.0145,0.0,0.742,0.615,150.06,"pop, Dance/Electronic" -The Weeknd,Pray For Me (with Kendrick Lamar),211440,True,2018,69,0.735,0.677,2,-4.979,1,0.093,0.0762,2.17e-05,0.111,0.188,100.584,"pop, R&B" -Billie Eilish,lovely (with Khalid),200185,False,2018,86,0.351,0.296,4,-10.109,0,0.0333,0.934,0.0,0.095,0.12,115.284,"pop, Dance/Electronic" -Migos,Walk It Talk It,276147,True,2018,75,0.907,0.633,2,-5.145,1,0.184,0.0876,2.6e-06,0.106,0.395,145.914,"hip hop, pop" -Clean Bandit,Solo (feat. 
Demi Lovato),222653,False,2018,71,0.737,0.636,11,-4.546,0,0.0437,0.0441,6.66e-05,0.35,0.565,105.005,"pop, Dance/Electronic" -Drake,Nonstop,238614,True,2018,77,0.912,0.412,7,-8.074,1,0.123,0.0165,0.0126,0.104,0.423,154.983,"hip hop, pop, R&B" -6ix9ine,FEFE,179404,True,2018,42,0.931,0.387,1,-9.127,1,0.412,0.088,0.0,0.136,0.376,125.978,hip hop -The Weeknd,Call Out My Name,228373,False,2018,80,0.461,0.593,1,-4.954,1,0.0356,0.17,0.0,0.307,0.175,134.17,"pop, R&B" -Nio Garcia,Te Boté - Remix,417920,True,2018,76,0.903,0.675,11,-3.445,0,0.214,0.542,1.28e-05,0.0595,0.442,96.507,latin -Migos,MotorSport,303076,True,2018,72,0.904,0.518,2,-5.32,1,0.183,0.0305,0.0,0.325,0.188,137.996,"hip hop, pop" -Nicky Jam,X,172854,False,2019,74,0.594,0.749,9,-5.298,0,0.056,0.0338,0.00106,0.3,0.694,179.968,latin -XXXTENTACION,SAD!,166605,True,2018,82,0.74,0.613,8,-4.88,1,0.145,0.258,0.00372,0.123,0.473,75.023,hip hop -Becky G,Sin Pijama,188560,False,2018,69,0.791,0.745,11,-3.695,0,0.0464,0.354,2.93e-05,0.104,0.82,94.014,"pop, latin" -Machine Gun Kelly,"Home (with Machine Gun Kelly, X Ambassadors & Bebe Rexha)",202804,False,2017,71,0.653,0.718,3,-5.232,0,0.213,0.00413,0.0,0.0537,0.216,82.034,"hip hop, pop" -6ix9ine,GUMMO,157643,True,2018,63,0.66,0.775,2,-4.926,1,0.172,0.0553,0.0,0.129,0.635,157.036,hip hop -Jax Jones,Breathe,207629,False,2017,58,0.722,0.744,11,-5.52,0,0.0363,0.0234,0.000157,0.143,0.686,125.985,"hip hop, pop, Dance/Electronic" -The Chainsmokers,Sick Boy,193200,False,2018,69,0.663,0.577,11,-7.518,0,0.0531,0.109,0.0,0.12,0.454,89.996,"pop, Dance/Electronic" -Jay Rock,"King's Dead (with Kendrick Lamar, Future & James Blake)",229670,True,2018,71,0.645,0.705,1,-5.008,1,0.299,0.000813,0.0,0.128,0.376,137.133,"hip hop, pop" -Rich The Kid,Plug Walk,175229,True,2018,72,0.876,0.519,11,-6.531,1,0.143,0.202,0.0,0.108,0.158,94.981,"hip hop, pop" -Kendrick Lamar,All The Stars (with SZA),232186,True,2018,80,0.698,0.633,8,-4.946,1,0.0597,0.0605,0.000194,0.0926,0.552,96.924,hip hop 
-Logic,Everyday,204746,True,2018,69,0.667,0.741,1,-4.099,1,0.0378,0.0425,0.0,0.0761,0.422,149.908,"hip hop, pop" -Jason Derulo,Tip Toe (feat. French Montana),187521,False,2017,64,0.845,0.709,10,-4.547,0,0.0714,0.0233,0.0,0.094,0.62,98.062,"hip hop, pop" -Ariana Grande,God is a woman,197546,True,2018,78,0.602,0.658,1,-5.934,1,0.0558,0.0233,6e-05,0.237,0.268,145.031,pop -Rae Sremmurd,Powerglide (feat. Juicy J) - From SR3MM,332300,True,2018,66,0.713,0.831,1,-4.75,0,0.15,0.0168,0.0,0.118,0.584,173.948,"hip hop, pop" -EO,German,170825,False,2018,67,0.862,0.583,8,-6.26,0,0.0654,0.811,1.86e-05,0.191,0.852,103.019,set() -Dennis Lloyd,Nevermind,156600,False,2017,74,0.6,0.688,5,-8.339,0,0.201,0.159,1.29e-05,0.409,0.0793,99.977,pop -girl in red,we fell in love in october,184153,False,2018,82,0.566,0.366,7,-12.808,1,0.028,0.113,0.181,0.155,0.237,129.959,"pop, rock, Dance/Electronic" -Selena Gomez,Wolves,197993,False,2017,78,0.724,0.804,11,-4.614,0,0.0448,0.124,0.0,0.204,0.306,124.987,pop -Tiësto,Jackie Chan,215759,True,2018,72,0.747,0.834,3,-2.867,0,0.045,0.374,0.0,0.0586,0.687,128.005,"pop, Dance/Electronic" -Tom Walker,Leave a Light On,185863,False,2017,70,0.586,0.624,5,-5.946,1,0.113,0.0153,1.78e-06,0.133,0.267,68.976,"rock, pop" -Sofía Reyes,"1, 2, 3 (feat. Jason Derulo & De La Ghetto)",201526,False,2018,65,0.792,0.895,1,-3.112,0,0.0589,0.165,0.0,0.0501,0.794,94.968,"pop, latin" -Marshmello,Silence,180822,False,2017,80,0.52,0.761,4,-3.093,1,0.0853,0.256,4.96e-06,0.17,0.286,141.971,"pop, Dance/Electronic" -5 Seconds of Summer,Youngblood,203417,False,2018,69,0.596,0.854,7,-5.114,0,0.463,0.0169,0.0,0.124,0.152,120.274,pop -Loud Luxury,Body (feat. 
brando),163216,False,2017,0,0.752,0.764,1,-4.399,1,0.038,0.0476,9.44e-05,0.0543,0.582,121.958,"pop, Dance/Electronic" -Liam Payne,For You (Fifty Shades Freed) (& Rita Ora),245453,False,2018,62,0.541,0.787,2,-4.618,0,0.0331,0.0167,0.0,0.157,0.2,113.0,pop -NF,Let You Down,212120,False,2017,79,0.662,0.714,5,-5.68,0,0.121,0.312,0.0,0.179,0.464,147.997,"hip hop, pop" -XXXTENTACION,changes,121886,False,2018,79,0.669,0.308,11,-10.068,1,0.029,0.883,0.0,0.0984,0.52,64.934,hip hop -Maluma,Corazón (feat. Nego do Borel),184720,False,2018,69,0.722,0.738,9,-6.073,0,0.247,0.328,1.47e-05,0.198,0.748,198.075,latin -Cardi B,Bodak Yellow,223712,True,2018,72,0.926,0.703,11,-6.337,0,0.103,0.0659,0.0,0.231,0.485,125.022,"hip hop, pop" -Lil Baby,Yes Indeed,142273,True,2018,79,0.963,0.346,5,-9.309,0,0.53,0.0355,0.0,0.108,0.562,119.957,hip hop -Jonas Blue,Rise,194407,False,2018,70,0.687,0.785,1,-4.65,1,0.0333,0.327,0.0,0.203,0.655,106.046,"pop, Dance/Electronic" -David Guetta,Flames,194680,False,2018,59,0.631,0.649,5,-5.892,0,0.0385,0.0817,3.8e-06,0.0934,0.421,93.95,"hip hop, pop, Dance/Electronic" -Ozuna,Vaina Loca,176133,False,2018,72,0.754,0.805,6,-4.249,1,0.0752,0.315,0.0,0.203,0.555,93.982,latin -Charlie Puth,How Long,200853,False,2018,72,0.845,0.561,1,-5.253,0,0.0778,0.211,3.49e-06,0.0383,0.811,109.974,pop -Post Malone,Better Now,231266,True,2018,80,0.68,0.578,10,-5.804,1,0.04,0.331,0.0,0.135,0.341,145.038,hip hop -BlocBoy JB,Look Alive (feat. Drake),181263,True,2018,73,0.922,0.581,10,-7.495,1,0.27,0.00104,5.86e-05,0.105,0.595,140.022,"hip hop, pop" -Eminem,River (feat. Ed Sheeran),221013,True,2017,72,0.748,0.749,8,-5.916,0,0.516,0.142,0.0,0.0713,0.659,90.09,hip hop -Marshmello,Happier,214289,False,2018,82,0.687,0.792,5,-2.749,1,0.0452,0.191,0.0,0.167,0.671,100.015,"pop, Dance/Electronic" -Dean Lewis,Be Alright,196373,True,2018,79,0.553,0.586,11,-6.319,1,0.0362,0.697,0.0,0.0813,0.443,126.684,pop -Charlie Puth,Done for Me (feat. 
Kehlani),180493,False,2018,67,0.856,0.632,6,-3.692,0,0.074,0.193,0.0,0.0688,0.697,112.009,pop -Migos,Stir Fry,190288,True,2018,72,0.817,0.816,2,-5.402,1,0.269,0.00263,0.0,0.159,0.508,181.982,"hip hop, pop" -YG,"BIG BANK (feat. 2 Chainz, Big Sean, Nicki Minaj)",237240,True,2018,68,0.745,0.346,1,-7.709,1,0.331,0.00552,0.0,0.0881,0.112,203.911,"hip hop, pop" -Drake,In My Feelings,217925,True,2018,75,0.835,0.626,1,-5.833,1,0.125,0.0589,6e-05,0.396,0.35,91.03,"hip hop, pop, R&B" -B Young,Jumanji,173153,False,2018,60,0.791,0.473,10,-9.86,0,0.25,0.279,0.0,0.0959,0.603,95.948,"hip hop, Dance/Electronic" -Charlie Puth,The Way I Am,186080,False,2018,64,0.755,0.769,10,-5.658,0,0.186,0.314,2.38e-06,0.0628,0.642,114.966,pop -Reik,Me Niego (feat. Ozuna & Wisin),221653,False,2018,1,0.777,0.779,0,-4.449,0,0.0972,0.0543,0.0,0.636,0.768,94.023,"pop, latin" -benny blanco,Eastside (with Halsey & Khalid),173799,False,2018,75,0.56,0.68,6,-7.648,0,0.321,0.555,0.0,0.116,0.319,89.391,"hip hop, pop" -Luis Fonsi,Échame La Culpa,173720,False,2017,62,0.733,0.892,0,-3.641,1,0.0417,0.0376,0.0,0.137,0.675,95.989,"pop, latin" -MK,17,196489,False,2017,65,0.703,0.832,0,-7.202,0,0.0689,0.00149,0.127,0.0696,0.667,122.029,"pop, Dance/Electronic" -Rita Ora,Anywhere,215064,False,2017,63,0.628,0.797,11,-3.953,0,0.0596,0.0364,0.0,0.104,0.321,106.93,"hip hop, pop, Dance/Electronic" -Banx & Ranx,Answerphone (feat. Yxng Bane),190920,False,2018,56,0.671,0.834,1,-3.928,0,0.0885,0.0387,0.0,0.454,0.454,113.021,Dance/Electronic -Ella Mai,Trip,213993,False,2018,72,0.477,0.61,11,-5.628,0,0.144,0.225,0.0,0.107,0.358,79.882,"hip hop, pop, R&B" -Daddy Yankee,Dura,200480,False,2018,71,0.783,0.84,1,-3.416,1,0.051,0.174,0.0,0.412,0.839,95.001,latin -DJ Khaled,No Brainer,260000,True,2018,67,0.552,0.76,0,-4.706,1,0.342,0.0733,0.0,0.0865,0.639,135.702,"hip hop, pop" -Anne-Marie,2002,186986,False,2018,80,0.697,0.683,1,-2.881,0,0.117,0.0372,0.0,0.137,0.603,96.133,"pop, Dance/Electronic" -Bruno Mars,Finesse - Remix; feat. 
Cardi B,217288,False,2017,74,0.704,0.859,5,-4.877,0,0.0996,0.0185,0.0,0.0215,0.926,105.115,pop -FINNEAS,Let's Fall in Love for the Night,190348,True,2018,71,0.737,0.408,5,-7.941,1,0.104,0.802,0.0,0.171,0.374,127.921,"rock, pop, Dance/Electronic" -Hugh Jackman,The Greatest Show,302146,False,2017,72,0.417,0.824,11,-7.36,0,0.105,0.000239,0.0545,0.0725,0.4,157.92,easy listening -Halsey,Bad At Love,181279,False,2017,72,0.675,0.751,0,-3.539,1,0.0296,0.0604,0.0,0.0893,0.612,118.384,"pop, Dance/Electronic" -Drake,I'm Upset,214466,True,2018,68,0.899,0.586,11,-7.866,0,0.343,0.279,0.0,0.0836,0.492,150.002,"hip hop, pop, R&B" -Camila Cabello,Never Be the Same,226973,False,2018,74,0.637,0.713,0,-4.333,1,0.0747,0.181,0.000637,0.137,0.243,129.923,pop -M.O,Bad Vibe,214253,False,2018,54,0.813,0.701,10,-4.428,1,0.0449,0.178,0.0,0.0962,0.758,110.009,"pop, Dance/Electronic" -Dua Lipa,IDGAF,217946,True,2017,78,0.836,0.544,7,-5.975,1,0.0943,0.0403,0.0,0.0824,0.51,97.028,pop -Cashmere Cat,Miss You (with Major Lazer & Tory Lanez),186231,False,2018,56,0.747,0.641,1,-4.502,1,0.0925,0.25,0.00107,0.106,0.453,100.028,"pop, rock" -Bazzi,Mine,131064,True,2018,75,0.71,0.789,4,-3.874,1,0.0722,0.0161,2.77e-06,0.451,0.717,142.929,"pop, Dance/Electronic" -Lil Dicky,Freaky Friday (feat. Chris Brown),216631,True,2018,70,0.755,0.599,8,-5.042,1,0.224,0.147,0.0,0.109,0.755,133.123,"hip hop, pop" -Lauv,I Like Me Better,197436,False,2018,81,0.752,0.505,9,-7.621,1,0.253,0.535,2.55e-06,0.104,0.419,91.97,"pop, Dance/Electronic" -The Chainsmokers,This Feeling,197946,False,2018,73,0.575,0.571,1,-7.906,1,0.0439,0.0558,0.0,0.0912,0.449,105.049,"pop, Dance/Electronic" -Maroon 5,Girls Like You (feat. 
Cardi B) - Cardi B Version,235545,True,2018,73,0.851,0.541,0,-6.825,1,0.0505,0.568,0.0,0.13,0.448,124.959,pop -Khalid,Love Lies (with Normani),201707,False,2018,73,0.708,0.648,6,-5.626,1,0.0449,0.0956,0.0,0.134,0.338,143.955,"pop, R&B" -Drake,Nice For What,210746,True,2018,77,0.585,0.909,8,-6.474,1,0.0707,0.0891,9.7e-05,0.119,0.758,93.372,"hip hop, pop, R&B" -Kendrick Lamar,LOVE. FEAT. ZACARI.,213400,True,2017,80,0.8,0.585,10,-7.343,1,0.0924,0.264,0.0,0.153,0.779,126.058,hip hop -Post Malone,Psycho (feat. Ty Dolla $ign),221440,True,2018,77,0.75,0.56,8,-8.094,1,0.105,0.546,0.0,0.111,0.459,140.06,hip hop -Lil Nas X,Old Town Road - Remix,157066,False,2019,79,0.878,0.619,6,-5.56,1,0.102,0.0533,0.0,0.113,0.639,136.041,"hip hop, pop" -Billie Eilish,bad guy,194087,False,2019,83,0.701,0.425,7,-10.965,1,0.375,0.328,0.13,0.1,0.562,135.128,"pop, Dance/Electronic" -Shawn Mendes,Señorita,190799,False,2019,78,0.759,0.548,9,-6.049,0,0.029,0.0392,0.0,0.0828,0.749,116.967,pop -Ariana Grande,7 rings,178626,True,2019,83,0.778,0.317,1,-10.732,0,0.334,0.592,0.0,0.0881,0.327,140.048,pop -Post Malone,Sunflower - Spider-Man: Into the Spider-Verse,157560,False,2019,79,0.755,0.522,2,-4.368,1,0.0575,0.533,0.0,0.0685,0.925,89.96,hip hop -Gesaffelstein,Lost in the Fire (feat. The Weeknd),202093,True,2019,84,0.658,0.671,2,-12.21,1,0.0363,0.0933,0.000927,0.115,0.166,100.966,Dance/Electronic -Halsey,Without Me,201660,True,2018,77,0.752,0.488,6,-7.05,1,0.0705,0.297,9.11e-06,0.0936,0.533,136.041,"pop, Dance/Electronic" -Regard,Ride It,157605,False,2019,81,0.88,0.751,7,-4.258,0,0.0874,0.177,6.43e-05,0.106,0.884,117.948,"pop, Dance/Electronic" -Lady Gaga,Shallow,215733,False,2018,82,0.572,0.385,7,-6.362,1,0.0308,0.371,0.0,0.231,0.323,95.799,pop -Mark Ronson,Nothing Breaks Like a Heart (feat. 
Miley Cyrus),217466,False,2018,79,0.601,0.794,7,-5.844,0,0.0671,0.00987,1.36e-06,0.388,0.244,114.066,pop -Lewis Capaldi,Someone You Loved,182160,False,2019,84,0.501,0.405,1,-5.679,1,0.0319,0.751,0.0,0.105,0.446,109.891,pop -Jax Jones,All Day And Night,169303,False,2019,60,0.585,0.782,1,-4.101,0,0.0897,0.269,0.0,0.156,0.521,121.908,"hip hop, pop, Dance/Electronic" -Mustard,Pure Water (with Migos),192470,True,2019,75,0.682,0.559,0,-5.545,1,0.127,0.174,0.0,0.344,0.137,202.015,"hip hop, pop" -NLE Choppa,Shotta Flow (feat. Blueface) [Remix],176631,True,2019,0,0.894,0.511,2,-4.768,1,0.42,0.0251,0.0,0.14,0.568,120.08,hip hop -Kodak Black,ZEZE (feat. Travis Scott & Offset),228759,True,2018,77,0.861,0.603,8,-5.788,0,0.176,0.0521,0.0,0.0924,0.504,98.043,hip hop -Daddy Yankee,Con Calma,193226,False,2019,79,0.737,0.86,8,-2.652,0,0.0593,0.11,1.94e-06,0.0574,0.656,93.989,latin -Alec Benjamin,Let Me Down Slowly,169353,False,2018,82,0.652,0.557,1,-5.714,0,0.0318,0.74,0.0,0.124,0.483,150.073,"rock, pop, Dance/Electronic" -BLACKPINK,Kill This Love,189052,False,2019,1,0.738,0.861,2,-4.141,1,0.237,0.318,0.00182,0.325,0.58,131.98,pop -Offset,Clout (feat. Cardi B),205803,True,2019,68,0.919,0.622,1,-7.384,1,0.0997,0.228,4.24e-06,0.122,0.424,140.022,"hip hop, pop" -MEDUZA,Piece Of Your Heart,152913,False,2019,75,0.677,0.744,10,-6.806,0,0.0295,0.0404,0.00016,0.074,0.631,124.08,"pop, Dance/Electronic" -Ed Sheeran,Antisocial (with Travis Scott),161746,False,2019,63,0.716,0.823,5,-5.313,0,0.0495,0.132,0.0,0.361,0.91,151.957,pop -Travis Scott,SICKO MODE,312820,True,2018,81,0.834,0.73,8,-3.714,1,0.222,0.00513,0.0,0.124,0.446,155.008,"hip hop, Dance/Electronic" -Mabel,Mad Love,169813,False,2019,0,0.631,0.803,0,-2.974,0,0.155,0.675,0.0,0.11,0.62,198.065,"pop, Dance/Electronic" -Lana Del Rey,Doin' Time,202192,True,2019,80,0.641,0.559,7,-11.132,0,0.0355,0.404,0.00402,0.0937,0.523,144.982,pop -DJ Snake,"Taki Taki (feat. 
Selena Gomez, Ozuna & Cardi B)",212500,True,2019,70,0.842,0.801,8,-4.167,0,0.228,0.157,4.82e-06,0.0642,0.617,95.881,"hip hop, pop, Dance/Electronic" -A Boogie Wit da Hoodie,Look Back at It,179449,True,2018,73,0.791,0.587,3,-5.075,0,0.0413,0.407,0.0,0.148,0.536,96.057,hip hop -Saweetie,My Type,126446,True,2019,71,0.899,0.811,2,-6.294,1,0.258,0.000677,0.0,0.0672,0.587,105.038,"hip hop, pop, R&B" -Lil Tecca,Ransom,131240,True,2019,78,0.745,0.642,7,-6.257,0,0.287,0.0204,0.0,0.0658,0.226,179.974,hip hop -Russ Millions,Keisha & Becky - Remix,252906,True,2019,69,0.863,0.471,6,-9.545,1,0.478,0.251,0.0,0.121,0.644,140.969,pop -Billie Eilish,lovely (with Khalid),200185,False,2018,86,0.351,0.296,4,-10.109,0,0.0333,0.934,0.0,0.095,0.12,115.284,"pop, Dance/Electronic" -Mabel,Don't Call Me Up,178480,False,2019,77,0.674,0.881,9,-2.853,1,0.147,0.296,3.01e-06,0.0793,0.234,98.994,"pop, Dance/Electronic" -Lil Nas X,Old Town Road,113000,False,2019,76,0.907,0.53,1,-6.112,1,0.127,0.0578,2.23e-06,0.101,0.507,135.998,"hip hop, pop" -Meek Mill,Going Bad (feat. Drake),180522,True,2018,78,0.889,0.496,4,-6.365,0,0.0905,0.259,0.0,0.252,0.544,86.003,"hip hop, pop" -Post Malone,Wow.,149546,True,2019,79,0.829,0.539,11,-7.359,0,0.208,0.136,1.78e-06,0.103,0.388,99.96,hip hop -Doja Cat,Juicy,202333,True,2019,57,0.786,0.658,0,-2.61,1,0.0661,0.0856,0.0,0.0689,0.458,170.037,pop -A Boogie Wit da Hoodie,Swervin (feat. 6ix9ine),189486,True,2018,75,0.581,0.662,9,-5.239,1,0.303,0.0153,0.0,0.111,0.434,93.023,hip hop -iann dior,emotions,131213,False,2019,72,0.63,0.63,9,-6.211,1,0.0395,0.0131,0.0,0.142,0.163,80.512,"hip hop, pop" -Paulo Londra,Adan y Eva,256971,False,2019,72,0.767,0.709,1,-4.47,1,0.336,0.323,0.0,0.0745,0.72,171.993,"hip hop, latin, Dance/Electronic" -Drake,Money In The Grave (Drake ft. 
Rick Ross),205426,True,2019,76,0.831,0.502,10,-4.045,0,0.046,0.101,0.0,0.122,0.101,100.541,"hip hop, pop, R&B" -Lil Nas X,Panini,114893,False,2019,71,0.703,0.594,5,-6.146,0,0.0752,0.342,0.0,0.123,0.475,153.848,"hip hop, pop" -Bad Bunny,MIA (feat. Drake),210367,False,2018,77,0.817,0.539,6,-6.349,0,0.0621,0.0141,0.000496,0.099,0.158,97.062,latin -Polo G,Pop Out (feat. Lil Tjay),166560,True,2019,78,0.772,0.639,1,-7.119,1,0.467,0.15,0.0,0.0698,0.261,168.112,hip hop -Juice WRLD,Robbery,240050,True,2019,53,0.685,0.692,2,-5.122,1,0.0457,0.328,0.0,0.153,0.578,159.966,hip hop -21 Savage,a lot,288624,True,2018,78,0.837,0.636,1,-7.643,1,0.086,0.0395,0.00125,0.342,0.274,145.972,hip hop -Young Thug,The London (feat. J. Cole & Travis Scott),200106,True,2019,69,0.796,0.586,4,-6.946,0,0.147,0.0247,0.0,0.132,0.179,97.981,"hip hop, pop" -Anuel AA,China,301714,False,2019,76,0.786,0.808,7,-3.702,1,0.0882,0.0846,0.000289,0.0822,0.609,105.027,latin -Camila Cabello,Liar,207038,False,2019,65,0.74,0.498,11,-6.684,0,0.0456,0.0169,0.00282,0.319,0.652,98.016,pop -Flipp Dinero,Leave Me Alone,195637,True,2019,69,0.792,0.743,7,-2.806,1,0.0851,0.107,0.0,0.183,0.742,150.024,"hip hop, pop" -Cardi B,Money,183527,True,2018,73,0.95,0.59,8,-6.508,0,0.29,0.00534,0.0,0.11,0.219,130.003,"hip hop, pop" -Ariana Grande,"break up with your girlfriend, i'm bored",190440,True,2019,76,0.726,0.554,5,-5.29,0,0.0917,0.0421,0.0,0.106,0.335,169.999,pop -Dave,Location (feat. Burna Boy),241293,True,2019,78,0.812,0.496,9,-5.969,0,0.297,0.271,0.0,0.0955,0.55,109.979,hip hop -Ava Max,Sweet but Psycho,187436,False,2018,7,0.719,0.704,1,-4.724,1,0.0476,0.0691,0.0,0.166,0.628,133.002,pop -Dua Lipa,Don't Start Now,183290,False,2019,79,0.794,0.793,11,-4.521,0,0.0842,0.0125,0.0,0.0952,0.677,123.941,pop -Young T & Bugsey,Strike a Pose (feat. 
Aitch),214203,True,2019,61,0.531,0.581,1,-5.801,1,0.101,0.0107,1.62e-05,0.101,0.591,137.776,hip hop -Calvin Harris,Giant (with Rag'n'Bone Man),229184,False,2019,73,0.807,0.887,1,-4.311,0,0.0361,0.016,0.000503,0.0811,0.606,122.015,"hip hop, pop, Dance/Electronic" -Ed Sheeran,Take Me Back to London (feat. Stormzy),189733,True,2019,66,0.885,0.762,8,-5.513,0,0.216,0.219,0.0,0.162,0.605,138.058,pop -Cardi B,Please Me,200889,True,2019,73,0.747,0.57,1,-6.711,1,0.081,0.0642,0.0,0.0832,0.65,133.992,"hip hop, pop" -Stormzy,Vossi Bop,196266,True,2019,64,0.682,0.653,8,-6.062,1,0.339,0.13,0.00116,0.129,0.428,188.115,"hip hop, Dance/Electronic" -Tones And I,Dance Monkey,209438,False,2019,78,0.824,0.588,6,-6.4,0,0.0924,0.692,0.000104,0.149,0.513,98.027,pop -Ariana Grande,boyfriend (with Social House),186106,True,2019,77,0.4,0.795,10,-3.731,0,0.461,0.119,0.0,0.159,0.702,190.097,pop -Bad Bunny,Callaita,250533,True,2019,81,0.61,0.624,2,-4.773,1,0.309,0.6,2.12e-06,0.243,0.244,176.169,latin -Ava Max,So Am I,183026,False,2019,4,0.682,0.656,6,-4.67,1,0.0435,0.0737,0.0,0.353,0.607,130.089,pop -Pedro Capó,Calma - Remix,238200,False,2018,74,0.826,0.773,11,-4.218,0,0.0524,0.323,0.0,0.143,0.761,126.899,"pop, latin" -J. 
Cole,MIDDLE CHILD,213593,True,2019,80,0.837,0.364,8,-11.713,1,0.276,0.149,0.0,0.271,0.463,123.984,hip hop -Lunay,Soltera - Remix,266086,False,2019,70,0.795,0.783,5,-4.271,1,0.0432,0.361,0.0,0.437,0.799,92.01,"pop, latin" -Khalid,Better,229412,False,2018,70,0.442,0.585,0,-10.332,0,0.0964,0.0984,0.391,0.14,0.116,97.565,"pop, R&B" -Blueface,Thotiana,129264,True,2018,1,0.906,0.382,10,-12.89,0,0.269,0.18,0.0,0.113,0.391,104.025,hip hop -DaBaby,Suge,163320,True,2019,72,0.876,0.662,2,-6.482,0,0.426,0.0608,0.0,0.127,0.844,75.445,hip hop -AJ Tracey,Ladbroke Grove,190537,False,2019,69,0.903,0.839,11,-9.447,0,0.208,0.0939,0.0,0.102,0.727,133.986,"hip hop, Dance/Electronic" -Billie Eilish,bury a friend,193143,False,2019,75,0.905,0.389,8,-14.505,1,0.332,0.74,0.162,0.106,0.196,120.046,"pop, Dance/Electronic" -BTS,Boy With Luv (feat. Halsey),229773,False,2019,35,0.645,0.862,11,-4.757,0,0.0965,0.0923,0.0,0.192,0.798,119.991,pop -Wiley,Boasty (feat. Idris Elba),177185,True,2019,64,0.887,0.765,2,-5.207,1,0.0669,0.00915,0.000872,0.0907,0.456,102.958,"hip hop, Dance/Electronic" -Jonas Brothers,Only Human,183000,False,2019,71,0.795,0.496,0,-5.883,1,0.0722,0.108,0.0,0.0645,0.874,94.01,pop -5 Seconds of Summer,Easier,157492,False,2019,1,0.505,0.428,5,-5.604,1,0.221,0.489,0.0,0.0977,0.618,175.813,pop -Avicii,SOS (feat. Aloe Blacc),157202,False,2019,71,0.802,0.645,5,-6.181,0,0.0715,0.272,0.0,0.119,0.376,100.001,"pop, Dance/Electronic" -"Tyler, The Creator",EARFQUAKE,190066,True,2019,80,0.554,0.498,9,-8.866,1,0.0685,0.23,5.98e-06,0.795,0.413,79.635,hip hop -Ashley O,On A Roll,154447,False,2019,57,0.736,0.81,5,-6.354,1,0.0906,0.077,0.0,0.0523,0.387,125.011,set() -Dominic Fike,3 Nights,177666,False,2018,78,0.815,0.518,7,-6.594,0,0.0897,0.223,0.0,0.104,0.877,151.891,"rock, pop" -Sigala,Wish You Well,205653,False,2019,64,0.669,0.895,5,-3.787,0,0.0575,0.128,0.0,0.297,0.576,124.975,"pop, Dance/Electronic" -Megan Thee Stallion,Hot Girl Summer (feat. 
Nicki Minaj & Ty Dolla $ign),199427,True,2019,69,0.872,0.814,0,-4.568,1,0.155,0.00485,1.96e-06,0.214,0.57,98.985,"hip hop, pop, R&B" -Anuel AA,Secreto,258800,False,2019,75,0.807,0.803,11,-4.156,1,0.126,0.602,0.00853,0.136,0.706,91.987,latin -Sam Feldt,Post Malone (feat. RANI),174444,False,2019,69,0.59,0.642,7,-3.87,1,0.122,0.0771,0.0,0.105,0.651,107.356,"pop, Dance/Electronic" -Kehlani,Nights Like This (feat. Ty Dolla $ign),201787,True,2019,74,0.61,0.725,8,-5.131,1,0.15,0.367,0.0,0.154,0.291,146.163,"hip hop, pop, R&B" -Sech,Otro Trago,225933,True,2019,71,0.746,0.7,0,-4.669,1,0.341,0.136,0.000159,0.11,0.619,176.044,latin -Post Malone,Better Now,231266,True,2018,80,0.68,0.578,10,-5.804,1,0.04,0.331,0.0,0.135,0.341,145.038,hip hop -Ed Sheeran,Cross Me (feat. Chance the Rapper & PnB Rock),206186,True,2019,64,0.746,0.787,4,-6.373,1,0.12,0.214,0.0,0.0669,0.607,95.005,pop -Lauv,i'm so tired...,162582,False,2019,2,0.599,0.733,11,-7.058,1,0.203,0.176,0.0,0.242,0.534,102.211,"pop, Dance/Electronic" -iann dior,gone girl,136568,True,2019,69,0.677,0.714,11,-5.637,1,0.0287,0.162,0.0,0.0717,0.355,94.956,"hip hop, pop" -Panic! At The Disco,High Hopes,190946,False,2018,80,0.579,0.904,5,-2.729,1,0.0618,0.193,0.0,0.064,0.681,82.014,rock -Marshmello,One Thing Right,181823,False,2019,73,0.659,0.625,4,-2.253,1,0.045,0.0644,0.0,0.582,0.442,88.042,"pop, Dance/Electronic" -Sam Smith,How Do You Sleep?,202204,False,2019,73,0.477,0.682,1,-4.931,0,0.0925,0.153,0.0,0.0763,0.345,110.567,pop -NSG,Options,240081,True,2020,57,0.836,0.621,1,-4.684,0,0.0894,0.389,9.16e-05,0.104,0.762,101.993,"World/Traditional, hip hop" -Normani,Motivation,193837,False,2019,71,0.599,0.887,4,-3.967,1,0.0984,0.0192,1.21e-06,0.3,0.881,170.918,"pop, R&B" -Joel Corry,Sorry,188640,False,2019,63,0.744,0.79,8,-4.617,0,0.0562,0.0547,0.000802,0.32,0.847,125.002,"pop, Dance/Electronic" -Post Malone,Goodbyes (Feat. 
Young Thug),174960,True,2019,1,0.58,0.653,5,-3.818,1,0.0745,0.447,0.0,0.111,0.175,150.231,hip hop -Jonas Brothers,Sucker,181026,False,2019,79,0.842,0.734,1,-5.065,0,0.0588,0.0427,0.0,0.106,0.952,137.958,pop -Taylor Swift,Cruel Summer,178426,False,2019,78,0.552,0.702,9,-5.707,1,0.157,0.117,2.06e-05,0.105,0.564,169.994,pop -Blanco Brown,The Git Up,200593,False,2019,69,0.847,0.678,9,-8.635,1,0.109,0.0669,0.0,0.274,0.811,97.984,"hip hop, country" -Sam Smith,Dancing With A Stranger (with Normani),171029,False,2019,75,0.741,0.52,8,-7.513,1,0.0656,0.45,1.97e-06,0.222,0.347,102.998,pop -Post Malone,Circles,215280,False,2019,85,0.695,0.762,0,-3.497,1,0.0395,0.192,0.00244,0.0863,0.553,120.042,hip hop +artist,song,duration_ms,explicit,year,popularity,danceability,energy,key,loudness,mode,speechiness,acousticness,instrumentalness,liveness,valence,tempo,genre +Britney Spears,Oops!...I Did It Again,211160,False,2000,77,0.751,0.834,1,-5.444,0,0.0437,0.3,1.77e-05,0.355,0.894,95.053,pop +blink-182,All The Small Things,167066,False,1999,79,0.434,0.897,0,-4.918,1,0.0488,0.0103,0.0,0.612,0.684,148.726,"rock, pop" +Faith Hill,Breathe,250546,False,1999,66,0.529,0.496,7,-9.007,1,0.029,0.173,0.0,0.251,0.278,136.859,"pop, country" +Bon Jovi,It's My Life,224493,False,2000,78,0.551,0.913,0,-4.063,0,0.0466,0.0263,1.35e-05,0.347,0.544,119.992,"rock, metal" +*NSYNC,Bye Bye Bye,200560,False,2000,65,0.614,0.928,8,-4.806,0,0.0516,0.0408,0.00104,0.0845,0.879,172.656,pop +Sisqo,Thong Song,253733,True,1999,69,0.706,0.888,2,-6.959,1,0.0654,0.119,9.64e-05,0.07,0.714,121.549,"hip hop, pop, R&B" +Eminem,The Real Slim Shady,284200,True,2000,86,0.949,0.661,5,-4.244,0,0.0572,0.0302,0.0,0.0454,0.76,104.504,hip hop +Robbie Williams,Rock DJ,258560,False,2000,68,0.708,0.772,7,-4.264,1,0.0322,0.0267,0.0,0.467,0.861,103.035,"pop, rock" +Destiny's Child,Say My Name,271333,False,1999,75,0.713,0.678,5,-3.525,0,0.102,0.273,0.0,0.149,0.734,138.009,"pop, R&B" +Modjo,Lady - Hear Me 
Tonight,307153,False,2001,77,0.72,0.808,6,-5.627,1,0.0379,0.00793,0.0293,0.0634,0.869,126.041,Dance/Electronic +Gigi D'Agostino,L'Amour Toujours,238759,False,2011,1,0.617,0.728,7,-7.932,1,0.0292,0.0328,0.0482,0.36,0.808,139.066,pop +Eiffel 65,Move Your Body - Gabry Ponte Original Radio Edit,268863,False,1999,56,0.745,0.958,7,-9.664,1,0.0287,0.0813,0.324,0.533,0.96,129.962,pop +Bomfunk MC's,Freestyler,306333,False,2000,55,0.822,0.922,11,-5.798,0,0.0989,0.0291,0.325,0.252,0.568,163.826,pop +Sting,Desert Rose,285960,False,1999,62,0.586,0.659,0,-7.92,0,0.0304,0.011,0.0,0.106,0.147,111.989,"rock, pop" +Melanie C,Never Be The Same Again,294200,False,1999,61,0.689,0.685,3,-5.153,1,0.0478,0.0921,0.0,0.119,0.398,160.067,"pop, Dance/Electronic" +Aaliyah,Try Again,284000,False,2002,53,0.797,0.622,6,-5.642,0,0.29,0.0807,0.0,0.0841,0.731,93.02,"hip hop, pop, R&B" +Anastacia,I'm Outta Love - Radio Edit,245400,False,1999,64,0.761,0.716,10,-5.8,0,0.056,0.396,0.0,0.0771,0.649,119.41,pop +Alice Deejay,Better Off Alone,214883,False,2000,73,0.671,0.88,8,-6.149,0,0.0552,0.00181,0.691,0.285,0.782,136.953,pop +Gigi D'Agostino,The Riddle,285426,False,1999,64,0.74,0.876,6,-6.87,0,0.0369,0.0173,0.00152,0.0785,0.825,127.002,pop +Dr. 
Dre,The Next Episode,161506,True,1999,82,0.922,0.909,10,-2.429,0,0.27,0.0281,0.0,0.0856,0.309,95.295,hip hop +Linkin Park,In the End,216880,False,2000,83,0.556,0.864,3,-5.87,0,0.0584,0.00958,0.0,0.209,0.4,105.143,"rock, metal" +Tom Jones,Sexbomb,211893,False,1999,65,0.801,0.876,8,-3.94,0,0.0446,0.144,1.38e-05,0.104,0.932,122.979,"rock, Folk/Acoustic, easy listening" +Sonique,It Feels So Good,240866,False,2000,62,0.634,0.677,5,-7.278,0,0.0304,0.0117,0.00103,0.126,0.558,135.012,pop +M.O.P.,Cold as Ice,244466,True,2000,54,0.656,0.88,11,-5.425,0,0.143,0.0421,0.0,0.294,0.758,85.565,hip hop +Melanie C,I Turn To You,352173,False,1999,54,0.522,0.803,1,-5.825,1,0.0327,0.00117,0.00167,0.31,0.0783,135.205,"pop, Dance/Electronic" +Limp Bizkit,Take A Look Around,321040,False,2000,72,0.425,0.852,11,-5.607,1,0.046,0.0175,0.306,0.0935,0.512,101.968,metal +Darude,Sandstorm,225493,False,2001,69,0.528,0.965,11,-7.984,0,0.0465,0.141,0.985,0.0797,0.587,136.065,"pop, Dance/Electronic" +Da Brat,What'chu Like (feat. 
Tyrese),221160,True,2000,53,0.879,0.681,10,-8.951,0,0.24,0.017,0.0,0.0669,0.817,99.974,"hip hop, pop, R&B" +Moloko,The Time Is Now,318280,False,2000,54,0.682,0.743,9,-10.644,0,0.165,0.35,0.000129,0.277,0.546,127.962,"pop, Dance/Electronic" +Chicane,Don't Give Up,210786,False,2016,47,0.644,0.72,10,-9.635,0,0.0419,0.00145,0.504,0.0839,0.53,132.017,Dance/Electronic +DMX,Party Up,268866,True,1999,71,0.51,0.931,11,-3.302,1,0.347,0.0738,0.0,0.5,0.53,201.936,"hip hop, pop" +Debelah Morgan,Dance with Me,220106,False,2000,49,0.85,0.674,7,-7.981,0,0.0373,0.309,0.000645,0.0356,0.74,115.005,"pop, R&B" +Madonna,Music,225973,False,2000,58,0.736,0.802,7,-8.527,1,0.0663,0.00149,0.0876,0.14,0.871,119.854,pop +Ruff Endz,No More,242560,False,2000,52,0.839,0.641,10,-5.669,0,0.0858,0.0324,4.56e-06,0.0602,0.927,97.004,R&B +Britney Spears,Born to Make You Happy,243533,False,1999,58,0.633,0.922,11,-4.842,0,0.0454,0.116,0.000465,0.071,0.686,84.11,pop +Montell Jordan,Get It On Tonite,276266,False,1999,59,0.813,0.491,10,-9.923,0,0.077,0.241,4.61e-05,0.0817,0.868,99.008,"hip hop, pop, R&B" +Kylie Minogue,Spinning Around,207866,False,2000,55,0.761,0.662,6,-7.645,0,0.0548,0.292,6.19e-05,0.0956,0.631,120.043,"pop, Dance/Electronic" +JAY-Z,Big Pimpin',283066,True,1999,69,0.88,0.814,11,-6.307,0,0.14,0.168,0.00672,0.0584,0.942,138.083,hip hop +LeAnn Rimes,I Need You,229826,False,2001,61,0.478,0.736,7,-7.124,1,0.0367,0.02,9.58e-05,0.118,0.564,144.705,"pop, country" +Avant,Separated,255600,False,2000,55,0.798,0.48,0,-5.564,1,0.0276,0.247,0.0,0.237,0.643,108.241,"pop, R&B" +Enrique Iglesias,Be With You,219360,False,1999,54,0.683,0.866,1,-5.436,0,0.0329,0.0395,0.00161,0.0483,0.542,121.996,"pop, latin" +Toni Braxton,He Wasn't Man Enough,261933,False,2000,66,0.739,0.947,11,-1.916,0,0.0411,0.00916,3.14e-05,0.326,0.766,88.009,"pop, R&B" +Bow Wow,Bounce With Me (feat. Xscape) - Edited Album Version,175893,False,2000,36,0.852,0.75,8,-5.153,1,0.168,0.434,0.0,0.265,0.934,72.016,"hip hop, pop, R&B" +Dr. 
Dre,Forgot About Dre,222293,True,1999,79,0.924,0.74,8,-1.299,1,0.0774,0.0827,0.0,0.163,0.621,133.974,hip hop +Missy Elliott,Hot Boyz,215466,True,1998,49,0.727,0.445,1,-11.241,1,0.291,0.339,0.0,0.18,0.527,81.125,"hip hop, pop, R&B" +Backstreet Boys,Show Me the Meaning of Being Lonely,234960,False,1999,68,0.63,0.625,6,-5.088,0,0.0252,0.231,0.0,0.0765,0.683,167.998,pop +Samantha Mumba,Gotta Tell You,201946,False,2018,43,0.729,0.632,0,-8.75,0,0.0279,0.191,0.0,0.166,0.774,109.981,pop +Mýa,Case Of The Ex (Whatcha Gonna Do),236906,False,2000,59,0.772,0.688,1,-4.715,0,0.0405,0.0548,9.79e-05,0.0725,0.348,98.0,"pop, R&B" +Mary Mary,Shackles (Praise You),198346,False,2000,64,0.779,0.834,7,-2.773,1,0.162,0.0343,0.0,0.0886,0.8,100.46,R&B +Next,Wifey,243666,False,2004,52,0.829,0.652,7,-8.693,0,0.108,0.067,0.0,0.0812,0.726,99.581,"hip hop, pop, R&B" +Janet Jackson,Doesn't Really Matter,265026,False,2001,47,0.771,0.796,5,-3.081,0,0.076,0.0993,0.00278,0.0981,0.801,99.316,"pop, R&B" +Ricky Martin,She Bangs - English Version,280626,False,2000,60,0.63,0.95,1,-4.012,1,0.0806,0.000915,6.51e-06,0.373,0.858,143.866,"pop, latin" +Jagged Edge,He Can't Love U,244053,False,2000,55,0.721,0.836,8,-3.972,0,0.206,0.112,0.0,0.235,0.508,126.279,"hip hop, pop, R&B" +Sisqo,Incomplete,274226,True,1999,60,0.746,0.443,1,-7.693,0,0.0771,0.282,0.0,0.14,0.272,119.311,"hip hop, pop, R&B" +JAY-Z,I Just Wanna Love U (Give It 2 Me),227866,True,2000,59,0.8,0.922,4,-5.125,0,0.24,0.301,9.31e-06,0.0352,0.801,98.631,hip hop +Mariah Carey,Thank God I Found You (feat. Joe & 98°),257360,False,1999,59,0.348,0.532,10,-5.882,1,0.0331,0.592,0.0,0.106,0.148,129.297,"pop, R&B" +Baha Men,Who Let The Dogs Out,198400,False,2000,65,0.869,0.887,0,-4.505,1,0.0993,0.0605,0.0,0.148,0.784,129.221,R&B +Donell Jones,"U Know What's Up (feat. 
Lisa ""Left Eye"" Lopes)",243733,True,1999,63,0.854,0.543,8,-6.166,0,0.0844,0.0402,5.73e-05,0.0419,0.868,103.032,"pop, R&B" +LeAnn Rimes,Can't Fight The Moonlight,215506,False,2001,65,0.628,0.834,6,-6.341,0,0.0497,0.403,0.0,0.051,0.626,97.865,"pop, country" +Oasis,Go Let It Out,278666,False,2000,0,0.408,0.849,2,-5.631,1,0.0333,0.0136,2.51e-05,0.56,0.628,84.192,"Folk/Acoustic, rock" +DJ Ötzi,Hey Baby (Radio Mix),219240,False,2010,58,0.666,0.968,10,-3.196,1,0.046,0.123,0.0,0.347,0.834,135.099,"pop, easy listening, Dance/Electronic" +P!nk,Most Girls,298960,False,2000,52,0.742,0.732,2,-6.046,0,0.0311,0.0424,0.00446,0.101,0.694,97.922,pop +Mariah Carey,Against All Odds (Take A Look at Me Now) (feat. Westlife),199480,False,2011,0,0.471,0.514,1,-5.599,1,0.0315,0.584,0.0,0.103,0.373,117.338,"pop, R&B" +Craig David,Fill Me In,257200,False,2000,60,0.682,0.744,8,-6.981,1,0.0365,0.376,0.00951,0.06,0.827,132.493,"hip hop, pop, R&B" +Christina Aguilera,I Turn to You,273706,False,1999,61,0.599,0.47,1,-8.356,1,0.0376,0.38,0.0,0.111,0.298,127.177,pop +Madonna,American Pie,273533,False,2000,58,0.631,0.734,5,-7.48,0,0.036,0.348,0.0,0.135,0.591,124.036,pop +Red Hot Chili Peppers,Otherside,255373,False,1999,78,0.458,0.795,0,-3.265,1,0.0574,0.00316,0.000202,0.0756,0.513,123.229,rock +Sammie,I Like It,251040,False,2000,55,0.826,0.656,9,-8.529,1,0.0617,0.0101,0.000113,0.0272,0.852,129.963,"hip hop, pop, R&B" +Craig David,7 Days,235133,False,2000,70,0.659,0.812,4,-7.499,0,0.0487,0.23,0.0,0.0951,0.888,83.014,"hip hop, pop, R&B" +Santana,Maria Maria (feat. 
The Product G&B),261973,False,1999,66,0.777,0.601,2,-5.931,1,0.126,0.0406,0.00201,0.0348,0.68,97.911,"rock, blues, latin" +Kandi,Don't Think I'm Not,243533,False,2000,55,0.859,0.622,11,-8.196,1,0.0445,0.0661,0.0,0.0394,0.433,134.007,"pop, R&B" +P!nk,There You Go,202800,False,2000,55,0.822,0.847,10,-6.729,0,0.0917,0.0854,0.0,0.0452,0.668,107.908,pop +Vengaboys,Shalala Lala,214819,False,2000,58,0.751,0.901,2,-5.802,1,0.0328,0.0504,0.00308,0.0395,0.973,124.017,pop +Ronan Keating,Life Is A Rollercoaster,234826,False,2000,59,0.655,0.791,0,-8.923,1,0.0302,0.1,0.000124,0.334,0.862,118.981,"pop, rock" +Madison Avenue,Don't Call Me Baby,228140,False,1999,56,0.808,0.982,3,-6.588,0,0.0311,0.0585,0.00689,0.35,0.961,124.999,Dance/Electronic +Destiny's Child,"Jumpin', Jumpin'",230200,False,1999,70,0.771,0.685,1,-4.639,1,0.0567,0.00543,0.00157,0.0537,0.683,88.997,"pop, R&B" +Céline Dion,That's the Way It Is,241373,False,1999,64,0.634,0.886,9,-5.424,1,0.0434,0.154,0.0,0.118,0.577,93.04,pop +3 Doors Down,Kryptonite,233933,False,2000,78,0.545,0.865,11,-5.708,0,0.0286,0.00664,1.1e-05,0.168,0.543,99.009,"pop, rock, metal" +Carl Thomas,I Wish,226760,False,2000,52,0.736,0.666,1,-4.929,1,0.0337,0.0593,3.82e-05,0.107,0.224,89.824,"pop, R&B" +Mystikal,Shake Ya Ass,256973,True,2000,57,0.914,0.607,7,-5.658,1,0.32,0.0626,0.0,0.0515,0.666,98.054,"hip hop, pop" +Fuel,Hemorrhage (In My Hands),236866,False,2000,49,0.313,0.831,1,-3.894,1,0.0404,0.000127,0.000341,0.24,0.332,152.034,"rock, pop, metal" +Donell Jones,Where I Wanna Be,253626,False,1999,57,0.664,0.396,5,-9.131,0,0.0298,0.52,0.0,0.268,0.453,102.053,"pop, R&B" +Savage Garden,Crash and Burn,281466,False,1999,54,0.581,0.607,4,-8.458,1,0.028,0.189,1.6e-06,0.0882,0.213,102.03,pop +Westlife,My Love,231760,False,2000,68,0.491,0.593,0,-5.975,1,0.0255,0.098,0.0,0.257,0.328,144.142,pop +All Saints,Pure Shores,268746,False,2000,62,0.631,0.664,6,-9.197,1,0.0242,0.0498,0.00042,0.0696,0.407,100.618,pop +Destiny's Child,"Independent Women, Pt. 
1",221133,False,2001,65,0.73,0.602,6,-3.782,0,0.206,0.362,3.69e-06,0.169,0.927,97.954,"pop, R&B" +*NSYNC,It's Gonna Be Me,191040,False,2000,60,0.644,0.874,0,-4.666,0,0.0801,0.0459,2.24e-06,0.0584,0.882,165.09,pop +Erykah Badu,Bag Lady,348893,False,2000,54,0.724,0.416,5,-8.964,0,0.0841,0.365,0.0,0.0969,0.578,151.181,"hip hop, R&B" +Marc Anthony,You Sang To Me,347106,False,1999,56,0.578,0.894,10,-5.42,1,0.0296,0.0103,2.66e-06,0.216,0.741,165.98,"pop, latin" +Matchbox Twenty,Bent,256133,False,2000,54,0.518,0.83,6,-6.814,0,0.0386,0.0404,3.85e-06,0.375,0.527,95.468,"pop, rock" +Gabrielle,Rise,219093,False,2001,60,0.558,0.481,8,-9.487,1,0.026,0.315,8.83e-06,0.09,0.631,144.673,"pop, R&B" +Backstreet Boys,Shape of My Heart,230093,False,2000,70,0.575,0.786,9,-4.353,1,0.0296,0.252,0.0,0.159,0.518,96.102,pop +Creed,With Arms Wide Open,274800,False,1999,64,0.41,0.539,0,-8.412,1,0.0302,0.00425,0.00089,0.117,0.141,138.852,"pop, rock, metal" +The Corrs,Breathless,207506,False,2000,68,0.607,0.82,11,-7.754,1,0.0597,0.0541,6.8e-05,0.269,0.768,126.988,"pop, Folk/Acoustic" +Joe,I Wanna Know,296693,False,2000,65,0.725,0.487,8,-5.959,0,0.0368,0.26,1.09e-05,0.431,0.599,136.086,"pop, R&B" +Wheatus,Teenage Dirtbag,241666,True,1999,71,0.625,0.85,4,-3.904,1,0.0495,0.346,0.000233,0.174,0.633,94.661,set() +Christina Aguilera,Come on over Baby (All I Want Is You) - Radio Version,203333,False,1999,64,0.829,0.915,8,-3.205,1,0.106,0.226,1.25e-05,0.246,0.779,118.903,pop +Creed,Higher,316733,False,1999,69,0.459,0.83,2,-6.254,1,0.0364,5.15e-05,0.00014,0.206,0.431,155.827,"pop, rock, metal" +Britney Spears,Lucky,206226,False,2000,65,0.765,0.791,8,-5.707,1,0.0317,0.262,0.000154,0.0669,0.966,95.026,pop +Nelly,Country Grammar (Hot Shit),287000,True,2000,68,0.865,0.664,2,-6.822,1,0.108,0.00689,0.0,0.142,0.565,162.831,"hip hop, pop, R&B" +Shaggy,It Wasn't Me,227600,False,2000,76,0.853,0.606,0,-4.596,1,0.0713,0.0561,0.0,0.313,0.654,94.759,"hip hop, pop" +Destiny's 
Child,Survivor,254026,False,2001,70,0.514,0.911,1,-2.027,0,0.41,0.0559,0.0,0.775,0.619,161.109,"pop, R&B" +Eminem,Stan,404106,True,2000,83,0.78,0.768,6,-4.325,0,0.238,0.0371,2.34e-06,0.518,0.507,80.063,hip hop +Kylie Minogue,Can't Get You out of My Head,230640,False,2001,73,0.766,0.563,9,-7.516,0,0.0339,0.0263,0.683,0.115,0.964,126.007,"pop, Dance/Electronic" +Christina Aguilera,"Lady Marmalade - From ""Moulin Rouge"" Soundtrack",264893,False,2001,68,0.76,0.801,5,-3.769,1,0.0534,0.0144,1.49e-05,0.665,0.653,109.919,pop +Nelly Furtado,I'm Like A Bird,243160,False,2000,1,0.622,0.608,10,-5.085,1,0.036,0.138,5.38e-06,0.273,0.607,89.661,"hip hop, pop, latin" +Shakira,"Whenever, Wherever",196160,False,2001,74,0.794,0.832,1,-4.862,0,0.0407,0.237,1.14e-05,0.203,0.871,107.657,"pop, latin" +Jimmy Eat World,The Middle,165853,False,2001,78,0.643,0.849,2,-5.428,1,0.0526,0.0371,0.0,0.058,0.903,162.152,"rock, pop" +Train,Drops of Jupiter (Tell Me),259933,False,2001,77,0.481,0.638,0,-5.862,1,0.0276,0.153,0.0,0.154,0.497,79.064,pop +Geri Halliwell,It's Raining Men,254640,False,2001,62,0.637,0.929,5,-6.03,0,0.0447,0.063,0.00796,0.318,0.604,136.482,pop +Blu Cantrell,Hit 'Em Up Style (Oops!),250706,False,2001,71,0.667,0.773,5,-4.983,0,0.0586,0.201,0.0,0.404,0.667,89.976,"pop, R&B" +Britney Spears,I'm a Slave 4 U,203600,False,2001,69,0.847,0.843,5,-3.579,0,0.106,0.415,0.000134,0.107,0.963,110.027,pop +Kylie Minogue,In Your Eyes,197826,False,2001,62,0.689,0.894,6,-6.342,0,0.0672,0.133,4.72e-05,0.0681,0.709,123.971,"pop, Dance/Electronic" +Missy Elliott,One Minute Man (feat. Ludacris),252986,True,2001,57,0.622,0.669,9,-8.419,1,0.329,0.0266,2.97e-06,0.152,0.57,93.839,"hip hop, pop, R&B" +Mary J. 
Blige,Family Affair,265866,False,2001,76,0.911,0.551,8,-3.75,0,0.0449,0.132,4.12e-05,0.0863,0.969,92.887,"pop, R&B" +Faithless,We Come 1 - Radio Edit,222435,False,2015,53,0.645,0.903,5,-10.587,0,0.0441,0.00188,0.799,0.147,0.61,135.977,"pop, Dance/Electronic" +Limp Bizkit,Rollin' (Air Raid Vehicle),213760,True,2000,73,0.603,0.933,1,-3.358,1,0.171,0.00591,0.0,0.206,0.709,96.306,metal +Lasgo,Something,220973,False,2001,65,0.643,0.981,7,-6.644,0,0.0439,0.0271,8.93e-05,0.11,0.38,140.01,pop +iio,Rapture (feat.Nadia Ali),253586,False,2006,54,0.661,0.855,8,-8.403,1,0.0377,0.0722,0.0185,0.199,0.601,123.943,Dance/Electronic +Emma Bunton,What Took You So Long?,241000,False,2001,54,0.668,0.772,9,-5.4,0,0.0307,0.123,0.0,0.341,0.911,118.011,pop +112,It's Over Now,264933,False,2001,57,0.66,0.71,1,-4.541,1,0.0409,0.0106,7.01e-06,0.0736,0.233,97.988,"hip hop, pop, R&B" +Blue,All Rise,223546,False,2001,63,0.721,0.737,5,-2.734,0,0.0324,0.121,0.0,0.165,0.931,97.996,pop +Jessica Simpson,Irresistible,194026,False,2001,43,0.657,0.965,8,-2.771,0,0.0556,0.0285,8.84e-05,0.0552,0.669,93.013,"pop, R&B" +Crazy Town,Butterfly,216733,False,1999,71,0.736,0.811,9,-4.17,0,0.081,0.00132,0.000142,0.107,0.609,103.502,"rock, metal" +Michael Jackson,You Rock My World,337733,False,2001,64,0.854,0.673,4,-3.132,0,0.185,0.038,0.000227,0.255,0.955,95.0,"pop, R&B" +Eve,Let Me Blow Ya Mind,230133,True,2001,73,0.908,0.557,8,-4.243,0,0.107,0.242,0.0,0.0709,0.897,90.032,"hip hop, pop, R&B" +Jennifer Lopez,Ain't It Funny,246160,False,2001,0,0.707,0.869,5,-4.525,0,0.0481,0.104,0.000121,0.0813,0.621,99.825,"hip hop, pop, R&B" +Brandy,Another Day in Paradise - R&B-Version,271626,False,2002,50,0.7,0.787,6,-5.176,0,0.0327,0.00666,3.68e-05,0.0724,0.556,102.043,"hip hop, pop, R&B" +Nickelback,How You Remind Me,223840,False,2001,78,0.446,0.764,10,-5.042,1,0.033,0.00135,0.0,0.099,0.543,172.094,"rock, metal" +Daft Punk,One More Time,320357,False,2001,76,0.613,0.697,2,-8.618,1,0.133,0.0194,0.0,0.332,0.476,122.746,"hip hop, 
Dance/Electronic" +Outkast,Ms. Jackson,270506,True,2000,82,0.843,0.806,4,-5.946,0,0.269,0.143,0.0,0.0771,0.613,94.948,"hip hop, pop" +Fragma,Everytime You Need Me - Radio Version,213346,False,2001,50,0.682,0.917,11,-5.459,0,0.0318,0.15,0.0676,0.34,0.79,137.029,"pop, Dance/Electronic" +Mariah Carey,Loverboy,229173,False,2001,42,0.721,0.79,1,-4.125,1,0.124,0.183,0.0,0.1,0.821,103.141,"pop, R&B" +Dido,Thank You,218360,False,1999,73,0.725,0.583,1,-9.942,0,0.0427,0.3,0.000238,0.0665,0.762,79.984,pop +Joe,Stutter (feat. Mystikal) - Double Take Remix,213026,False,2000,57,0.767,0.759,6,-6.516,1,0.117,0.0513,0.0,0.31,0.677,89.989,"pop, R&B" +P.O.D.,Youth of the Nation,256240,False,2001,69,0.563,0.86,8,-7.533,1,0.0621,0.00834,0.0106,0.39,0.517,97.867,"rock, metal" +Jennifer Lopez,Play,211493,True,2001,57,0.775,0.729,1,-4.229,0,0.162,0.0303,0.00247,0.0361,0.895,104.719,"hip hop, pop, R&B" +Missy Elliott,Get Ur Freak On,211120,True,2001,68,0.797,0.75,0,-9.369,1,0.247,0.533,0.108,0.095,0.74,177.87,"hip hop, pop, R&B" +Ricky Martin,Nobody Wants to Be Lonely (with Christina Aguilera),252706,False,2008,52,0.635,0.854,10,-5.02,0,0.0612,0.00579,0.0083,0.0623,0.59,100.851,"pop, latin" +Christina Milian,AM To PM,231213,False,2001,60,0.872,0.868,10,-3.036,0,0.12,0.153,2.26e-05,0.843,0.822,105.005,"hip hop, pop, R&B" +Roger Sanchez,Another Chance,452906,False,2000,50,0.61,0.82,3,-10.029,1,0.0312,0.000211,0.146,0.138,0.45,127.993,Dance/Electronic +Gorillaz,Clint Eastwood,340920,True,2001,74,0.663,0.694,10,-8.627,0,0.171,0.0253,0.0,0.0698,0.525,167.953,hip hop +2Pac,Until The End Of Time,266506,True,2001,57,0.757,0.706,8,-6.665,1,0.203,0.0267,0.0,0.197,0.375,96.976,hip hop +Tamia,Stranger in My House,285386,False,2000,48,0.676,0.601,0,-4.905,1,0.0297,0.103,0.0,0.147,0.206,119.94,"pop, R&B" +Mary J. 
Blige,No More Drama,326240,False,2001,61,0.653,0.837,6,-6.818,0,0.124,0.534,7.12e-05,0.0784,0.64,97.914,"pop, R&B" +Jennifer Lopez,Love Don't Cost a Thing,221226,False,2001,67,0.786,0.842,4,-5.115,0,0.0707,0.00305,3.54e-06,0.473,0.685,97.577,"hip hop, pop, R&B" +Jamiroquai,Little L,295400,False,2001,65,0.878,0.724,10,-5.373,0,0.129,0.168,0.0116,0.133,0.904,121.906,pop +Case,Missing You,284666,False,2001,55,0.612,0.579,1,-6.417,0,0.0553,0.285,0.0,0.0556,0.716,86.31,"hip hop, pop, R&B" +112,Peaches & Cream,193093,False,2001,63,0.677,0.52,4,-6.255,0,0.334,0.00277,0.0,0.0532,0.768,203.862,"hip hop, pop, R&B" +Five,Let's Dance - Radio Edit,218626,False,2001,47,0.631,0.821,4,-7.853,0,0.0867,0.00876,0.000247,0.293,0.547,118.007,pop +Rui Da Silva,Touch Me (Radio Edit) [feat. Cassandra],213133,False,2001,54,0.464,0.922,0,-8.399,1,0.0638,0.124,0.227,0.0546,0.695,129.078,Dance/Electronic +Alien Ant Farm,Smooth Criminal,209266,False,2001,75,0.653,0.964,9,-4.261,0,0.0582,0.00316,0.00512,0.144,0.87,126.928,"rock, metal" +Erick Sermon,Music (feat. Marvin Gaye),223133,True,2001,57,0.897,0.466,10,-9.053,0,0.203,0.187,1.35e-06,0.0757,0.884,100.01,"hip hop, pop" +D12,Purple Pills,304506,True,2001,68,0.78,0.634,1,-5.941,1,0.16,0.0199,2.98e-05,0.29,0.754,125.25,"hip hop, pop, rock" +Usher,Pop Ya Collar - Radio Edit,210813,False,2016,26,0.888,0.8,5,-3.944,0,0.0946,0.0901,2.12e-05,0.229,0.866,106.957,"hip hop, pop, R&B" +Madonna,Don't Tell Me,280973,False,2000,53,0.699,0.618,7,-7.338,1,0.0594,0.0502,0.000925,0.0914,0.679,99.965,pop +Blue,If You Come Back,207560,False,2001,58,0.582,0.707,10,-4.487,1,0.0319,0.0822,0.0,0.0467,0.701,78.375,pop +Atomic Kitten,Eternal Flame - Single Version,195506,False,2001,58,0.578,0.581,0,-6.867,1,0.0318,0.0658,0.0042,0.0674,0.408,83.293,pop +Jagged Edge,Where the Party At (feat. 
Nelly),232573,False,2001,67,0.596,0.661,5,-6.239,0,0.226,0.31,0.0,0.0847,0.86,129.491,"hip hop, pop, R&B" +Afroman,Because I Got High,197760,True,2001,68,0.802,0.341,7,-8.56,1,0.488,0.169,0.0,0.0783,0.849,166.01,hip hop +S Club 7,Don't Stop Movin',233626,False,2001,63,0.822,0.672,7,-6.133,1,0.0329,0.0285,0.0,0.213,0.91,117.033,pop +Craig David,Fill Me In,257200,False,2000,60,0.682,0.744,8,-6.981,1,0.0365,0.376,0.00951,0.06,0.827,132.493,"hip hop, pop, R&B" +Nelly Furtado,Turn Off The Light,276106,False,2000,1,0.587,0.679,2,-6.26,1,0.0927,0.0839,0.000316,0.413,0.65,180.184,"hip hop, pop, latin" +The Supermen Lovers,Starlight - Radio Edit,234400,False,2001,61,0.739,0.679,0,-5.079,1,0.0373,0.000424,0.0187,0.389,0.786,127.487,Dance/Electronic +Mis-Teeq,All I Want - Sunship Radio Edit,207400,False,2004,46,0.795,0.919,9,-3.07,0,0.0835,0.37,0.00863,0.189,0.96,134.079,"pop, Dance/Electronic" +Jagged Edge,Promise,246720,False,2000,56,0.751,0.568,5,-5.431,0,0.0872,0.136,0.0,0.0288,0.646,128.002,"hip hop, pop, R&B" +S Club 7,Have You Ever,201533,False,2001,60,0.585,0.702,0,-5.734,1,0.0325,0.538,0.0,0.216,0.51,139.909,pop +Enya,Only Time,218546,False,2000,69,0.418,0.249,3,-13.744,1,0.0301,0.841,0.661,0.112,0.213,82.803,"World/Traditional, Folk/Acoustic" +Faith Hill,There You'll Be,222120,False,2001,60,0.321,0.511,8,-7.77,1,0.0306,0.513,0.0,0.127,0.17,128.98,"pop, country" +Ronan Keating,Lovin' Each Day,212973,False,2000,58,0.541,0.899,1,-6.261,1,0.0633,0.0178,2.95e-06,0.286,0.829,106.676,"pop, rock" +Destiny's Child,Bootylicious,207906,False,2001,64,0.84,0.835,1,-4.386,0,0.275,0.00281,1.11e-06,0.152,0.637,103.376,"pop, R&B" +Steps,It's the Way You Make Me Feel,197360,False,2000,53,0.573,0.665,8,-5.081,1,0.0239,0.108,2.09e-06,0.095,0.347,105.006,"pop, Dance/Electronic" +Sunshine Anderson,Heard It All Before,295826,False,2001,56,0.697,0.925,3,-4.209,0,0.216,0.0614,0.0,0.095,0.678,96.951,"pop, R&B" +Lenny 
Kravitz,Again,231666,False,2000,68,0.55,0.804,2,-5.218,1,0.0271,0.0148,4.33e-05,0.105,0.789,79.166,"rock, pop" +Daniel Bedingfield,Gotta Get Thru This - D'N'D Radio Edit,162333,False,2002,46,0.836,0.762,7,-5.044,0,0.0598,0.0826,5.48e-05,0.102,0.941,133.592,pop +DB Boulevard,Point Of View - Radio Edit,231166,False,2018,0,0.676,0.715,6,-6.854,1,0.0287,0.00284,0.0746,0.0685,0.275,129.006,Dance/Electronic +Janet Jackson,All For You,329933,False,2001,65,0.753,0.934,2,-3.011,1,0.0736,0.0174,0.065,0.128,0.73,113.525,"pop, R&B" +Atomic Kitten,Whole Again,185013,False,2001,66,0.747,0.706,4,-4.653,1,0.0413,0.0844,0.00355,0.174,0.567,94.019,pop +Ja Rule,Livin' It Up,257066,False,2001,64,0.874,0.768,6,-4.086,1,0.311,0.0554,0.0,0.041,0.636,106.095,"hip hop, pop, R&B" +Destiny's Child,"Independent Women, Pt. 1",221133,False,2001,65,0.73,0.602,6,-3.782,0,0.206,0.362,3.69e-06,0.169,0.927,97.954,"pop, R&B" +DJ Pied Piper & The Masters Of Ceremonies,Do You Really Like It? - Radio Edit,217120,False,2001,54,0.847,0.877,2,-5.424,1,0.0493,0.124,0.0,0.148,0.764,131.044,Dance/Electronic +Jennifer Lopez,I'm Real (feat. 
Ja Rule) - Murder Remix,262133,True,2001,66,0.708,0.587,11,-7.93,0,0.151,0.273,0.0,0.0718,0.554,83.46,"hip hop, pop, R&B" +Musiq Soulchild,Love,304666,False,2000,0,0.569,0.385,1,-9.919,0,0.0499,0.342,0.0,0.0876,0.339,99.738,"pop, R&B" +So Solid Crew,21 Seconds,302826,True,2001,51,0.607,0.637,1,-11.072,1,0.241,0.0191,0.0,0.124,0.818,137.03,Dance/Electronic +Robbie Williams,Eternity,302760,False,2001,48,0.469,0.316,10,-8.106,1,0.0265,0.504,6.04e-06,0.0919,0.199,77.967,"pop, rock" +Basement Jaxx,Romeo,217493,False,2001,0,0.713,0.829,2,-4.171,1,0.0491,0.00769,0.00203,0.139,0.844,126.853,"pop, Dance/Electronic" +Ludacris,Southern Hospitality (Featuring Pharrell),300933,True,2000,54,0.623,0.896,0,-4.719,1,0.247,0.035,0.0,0.0941,0.613,95.158,"hip hop, pop" +Gabrielle,Rise,219093,False,2001,60,0.558,0.481,8,-9.487,1,0.026,0.315,8.83e-06,0.09,0.631,144.673,"pop, R&B" +Nelly,Ride Wit Me,291781,True,2000,75,0.85,0.7,7,-6.49,1,0.0478,0.0616,1.8e-06,0.244,0.722,101.875,"hip hop, pop, R&B" +Trick Daddy,I'm a Thug,254400,True,2001,59,0.933,0.561,11,-5.961,1,0.122,0.0275,0.0,0.102,0.586,139.976,"hip hop, pop" +Travis,Sing,228800,False,2001,68,0.33,0.905,4,-5.047,1,0.0497,0.000233,8.14e-06,0.0712,0.259,163.142,"rock, pop" +JAY-Z,Izzo (H.O.V.A.),240626,True,2001,63,0.618,0.844,1,-4.051,0,0.342,0.0178,0.000126,0.0634,0.697,84.411,hip hop +Gabrielle,Out Of Reach,196986,False,2001,69,0.51,0.48,11,-6.567,1,0.0452,0.503,0.0,0.106,0.496,182.862,"pop, R&B" +OPM,Heaven Is a Halfpipe (If I Die),257426,True,2000,56,0.743,0.894,8,-6.886,1,0.0349,0.0755,0.00283,0.367,0.77,95.9,rock +K-Ci & JoJo,Crazy,262773,False,2019,30,0.68,0.644,0,-4.507,1,0.0258,0.084,0.0,0.549,0.484,116.097,"pop, R&B" +Staind,It's Been Awhile,264706,True,2001,64,0.509,0.774,6,-4.054,1,0.0338,0.00189,0.000549,0.143,0.0824,116.529,"rock, metal" +3LW,No More (Baby I'ma Do Right),263440,False,2000,56,0.721,0.723,2,-7.08,0,0.0631,0.102,4.4e-06,0.0651,0.761,88.933,"pop, R&B" 
+Ginuwine,Differences,265533,False,2001,65,0.562,0.594,3,-4.578,0,0.0558,0.318,0.0,0.311,0.423,62.876,"hip hop, pop, R&B" +*NSYNC,Gone,292000,False,2001,45,0.699,0.403,11,-8.564,0,0.0614,0.421,0.0,0.126,0.5,113.922,pop +Lifehouse,Hanging By A Moment,216360,False,2000,61,0.537,0.858,1,-4.903,1,0.0349,0.000966,0.0,0.0812,0.502,124.599,"pop, rock, metal" +Nelly,Dilemma,289160,True,2002,77,0.727,0.552,2,-8.074,0,0.14,0.227,0.000164,0.198,0.607,168.189,"hip hop, pop, R&B" +Eminem,Without Me,290320,True,2002,87,0.908,0.669,7,-2.827,1,0.0738,0.00286,0.0,0.237,0.662,112.238,hip hop +Avril Lavigne,Complicated,244506,False,2002,78,0.585,0.776,5,-5.898,1,0.0459,0.0572,7.74e-06,0.3,0.427,77.987,pop +Vanessa Carlton,A Thousand Miles,237493,False,2002,75,0.56,0.825,11,-3.862,1,0.0379,0.323,0.0,0.161,0.268,94.931,"Folk/Acoustic, pop" +The Calling,Wherever You Will Go,208600,False,2001,72,0.558,0.719,2,-5.113,1,0.0267,0.0367,0.0,0.115,0.371,112.027,pop +Shakira,Underneath Your Clothes,224066,False,2001,64,0.616,0.597,8,-5.328,1,0.0415,0.691,0.0,0.104,0.362,165.508,"pop, latin" +No Doubt,Underneath It All,302720,False,2001,27,0.729,0.731,4,-4.822,1,0.0684,0.232,1.57e-06,0.391,0.839,138.202,"rock, pop" +Truth Hurts,Addictive,226440,True,2002,58,0.701,0.677,11,-6.591,0,0.147,0.0737,0.000162,0.187,0.367,99.271,"pop, R&B" +Busta Rhymes,I Know What You Want (feat. Flipmode Squad),324306,True,2002,68,0.648,0.759,6,-4.315,1,0.306,0.0142,0.0,0.648,0.518,85.996,"hip hop, pop" +Eminem,'Till I Collapse,297786,True,2002,85,0.548,0.847,1,-3.237,1,0.186,0.0622,0.0,0.0816,0.1,171.447,hip hop +Diddy,"I Need a Girl (Pt. 2) [feat. 
Loon, Ginuwine & Mario Winans]",285586,False,2004,69,0.713,0.471,1,-7.392,0,0.483,0.421,0.0,0.0308,0.779,199.764,"hip hop, pop" +Christina Milian,When You Look At Me - Radio Edit,222546,False,2001,60,0.955,0.839,3,-3.399,0,0.127,0.0934,0.000175,0.099,0.825,108.955,"hip hop, pop, R&B" +Madonna,Die Another Day,276360,False,2003,53,0.792,0.797,0,-5.166,0,0.0901,0.062,0.00325,0.0896,0.491,123.993,pop +Jennifer Lopez,I'm Gonna Be Alright (feat. Nas),172240,False,2002,53,0.718,0.69,10,-4.382,0,0.265,0.106,0.0,0.271,0.774,93.401,"hip hop, pop, R&B" +Angie Martinez,If I Could Go! (feat. Lil' Mo & Sacario),244466,False,2019,40,0.583,0.643,9,-7.486,0,0.355,0.171,0.0,0.0395,0.7,195.685,pop +Linkin Park,In the End,216880,False,2000,83,0.556,0.864,3,-5.87,0,0.0584,0.00958,0.0,0.209,0.4,105.143,"rock, metal" +Las Ketchup,The Ketchup Song (Aserejé) - Spanglish Version,213973,False,2002,66,0.607,0.923,1,-6.777,1,0.0948,0.0193,1.1e-06,0.0924,0.868,184.819,set() +Aaliyah,Rock The Boat,275026,False,2019,0,0.641,0.72,5,-5.209,1,0.0336,0.0688,0.00286,0.193,0.418,92.988,"hip hop, pop, R&B" +Holly Valance,Kiss Kiss,204400,False,2002,54,0.705,0.717,4,-4.944,0,0.125,0.00369,0.461,0.0701,0.554,97.036,"pop, Dance/Electronic" +Sugababes,Round Round,236426,False,2002,59,0.74,0.845,6,-3.802,0,0.0338,0.00287,6.23e-06,0.115,0.749,126.607,"pop, R&B" +Marilyn Manson,Tainted Love,200426,False,2003,60,0.622,0.876,8,-7.779,1,0.0728,0.00107,1.83e-06,0.292,0.537,142.017,"rock, metal" +Brandy,What About Us?,253200,False,2002,43,0.686,0.698,5,-2.942,0,0.223,0.263,0.0,0.39,0.748,93.118,"hip hop, pop, R&B" +Chad Kroeger,Hero (feat. 
Josey Scott),200480,False,2003,66,0.427,0.843,4,-4.54,1,0.0364,0.00216,0.0,0.179,0.304,147.387,metal +Britney Spears,Overprotected - Radio Edit,198600,False,2001,61,0.682,0.894,0,-1.73,0,0.0727,0.0381,0.0,0.416,0.845,95.992,pop +Scooter,The Logical Song,234116,False,2013,46,0.527,0.933,6,-6.277,0,0.0382,0.00112,0.0362,0.425,0.493,144.043,"pop, Dance/Electronic" +Missy Elliott,4 My People (feat. Eve),289373,True,2001,49,0.969,0.701,1,-7.503,1,0.156,0.14,0.00161,0.201,0.905,121.392,"hip hop, pop, R&B" +Aaliyah,More Than A Woman,230346,False,2002,42,0.646,0.638,11,-5.803,1,0.0787,0.0556,0.0014,0.182,0.74,86.994,"hip hop, pop, R&B" +Brandy,Full Moon,248933,False,2002,52,0.611,0.654,10,-4.823,0,0.139,0.372,0.00432,0.0926,0.609,105.922,"hip hop, pop, R&B" +Blue,One Love,207186,False,2002,60,0.683,0.722,8,-4.039,0,0.0358,0.0243,4.2e-05,0.35,0.854,95.012,pop +Busta Rhymes,Pass The Courvoisier Part II (feat. P. Diddy & Pharrell) - Remix,238600,True,2001,55,0.697,0.793,6,-4.699,0,0.323,0.0844,0.0,0.0619,0.56,89.767,"hip hop, pop" +Céline Dion,A New Day Has Come - Radio Remix,259773,False,2001,59,0.574,0.691,6,-5.103,1,0.035,0.0826,1.65e-05,0.149,0.195,91.969,pop +Scooter,Nessaja,208449,False,2013,47,0.486,0.904,2,-6.642,1,0.0364,0.000748,0.0984,0.187,0.118,143.048,"pop, Dance/Electronic" +Eve,Gangsta Lovin',239266,True,2002,58,0.723,0.84,1,-3.523,0,0.0608,0.0619,0.0,0.0945,0.827,94.332,"hip hop, pop, R&B" +Disturbing Tha Peace,Move Bitch,272293,True,2002,59,0.777,0.751,1,-5.692,1,0.133,0.245,0.0,0.1,0.191,177.894,"hip hop, pop" +Khia,"My Neck, My Back (Lick It)",222560,True,2015,59,0.859,0.625,9,-4.234,0,0.13,0.0296,0.0,0.0537,0.71,102.072,"pop, R&B" +Tweet,Oops (Oh My) [feat. 
Missy Elliott],237800,True,2002,64,0.66,0.536,10,-8.786,0,0.29,0.233,0.00669,0.111,0.775,159.963,"hip hop, pop, R&B" +Kylie Minogue,Spinning Around,207866,False,2000,55,0.761,0.662,6,-7.645,0,0.0548,0.292,6.19e-05,0.0956,0.631,120.043,"pop, Dance/Electronic" +Nelly,Hot In Herre,228240,True,2002,75,0.956,0.745,11,-4.753,0,0.12,0.206,0.0,0.0615,0.912,107.075,"hip hop, pop, R&B" +Gareth Gates,Anyone of Us (Stupid Mistake),227866,False,2003,53,0.7,0.72,1,-6.061,1,0.0261,0.285,0.0,0.0845,0.654,106.986,pop +Christina Aguilera,Dirrty (feat. Redman),298853,False,2002,68,0.64,0.889,2,-3.073,1,0.322,0.107,0.0,0.339,0.436,99.931,pop +B2K,Uh Huh,223293,False,2002,49,0.855,0.681,2,-4.955,1,0.0688,0.0881,3.7e-06,0.0592,0.908,99.699,"hip hop, pop, R&B" +Sugababes,Freak Like Me,195866,False,2002,45,0.517,0.919,5,-3.451,1,0.0925,0.0996,1.81e-06,0.557,0.387,91.868,"pop, R&B" +Dirty Vegas,Days Go By,432146,False,2002,48,0.786,0.853,9,-8.274,0,0.0688,0.0499,0.0872,0.35,0.309,126.99,Dance/Electronic +Fat Joe,What's Luv? (feat. 
Ja-Rule & Ashanti),267093,True,2001,73,0.835,0.707,8,-5.074,1,0.0598,0.0219,2.19e-05,0.108,0.919,93.955,"hip hop, pop" +Liberty X,Just A Little,237359,False,2019,43,0.786,0.614,5,-6.554,0,0.0574,0.00616,0.0,0.049,0.742,103.887,pop +No Doubt,Hella Good,242586,False,2001,26,0.771,0.665,8,-3.954,1,0.035,0.0135,0.0123,0.0903,0.844,115.142,"rock, pop" +LL Cool J,Luv U Better,287000,False,2002,2,0.668,0.806,6,-3.9,1,0.239,0.232,0.0,0.18,0.721,95.022,"hip hop, pop, R&B" +Red Hot Chili Peppers,Can't Stop,269000,False,2002,80,0.618,0.938,9,-3.442,1,0.0456,0.0179,0.0,0.167,0.875,91.455,rock +Musiq Soulchild,Halfcrazy,254493,False,2002,58,0.412,0.77,11,-6.057,0,0.18,0.462,1.5e-06,0.0485,0.418,101.955,"pop, R&B" +Sean Paul,Gimme the Light,228000,True,2002,59,0.778,0.761,2,-5.529,1,0.0525,0.141,4.61e-05,0.0441,0.923,107.288,"hip hop, pop" +Faith Evans,I Love You,267160,False,2001,53,0.489,0.757,11,-4.062,0,0.0926,0.00981,0.0,0.234,0.567,168.004,"hip hop, pop, R&B" +Charli Baltimore,Down 4 U,318213,True,2002,47,0.64,0.59,7,-7.442,1,0.32,0.252,0.0,0.665,0.519,83.133,pop +Kylie Minogue,Love at First Sight,238266,False,2001,55,0.603,0.774,3,-6.066,0,0.0428,0.0288,0.051,0.0533,0.48,124.994,"pop, Dance/Electronic" +N.O.R.E.,Nothin',264653,True,2002,58,0.791,0.863,3,-6.118,0,0.151,0.317,1.13e-06,0.27,0.742,97.035,"hip hop, pop" +Gareth Gates,Unchained Melody,233666,False,2003,35,0.368,0.426,10,-8.049,1,0.0277,0.255,5.39e-06,0.108,0.208,106.52,pop +Diddy,I Need a Girl (Pt. 1) [feat. 
Usher & Loon],268800,False,2005,63,0.66,0.707,6,-5.758,1,0.208,0.397,0.0,0.211,0.761,89.279,"hip hop, pop" +Big Brovaz,Nu Flow,201480,False,2001,45,0.783,0.726,7,-8.718,1,0.106,0.0903,2.53e-06,0.363,0.784,148.062,pop +No Doubt,Hey Baby,207040,False,2001,24,0.705,0.872,11,-3.557,0,0.125,0.0602,0.0,0.228,0.746,93.63,"rock, pop" +Ashanti,Baby,265706,True,2002,55,0.567,0.58,4,-5.757,0,0.223,0.0084,5.37e-06,0.0342,0.46,157.596,"hip hop, pop, R&B" +Sophie Ellis-Bextor,Murder On The Dancefloor,230013,False,2002,62,0.734,0.848,1,-5.285,0,0.0309,0.00312,1.16e-05,0.313,0.863,117.31,"pop, Dance/Electronic" +Justin Timberlake,Like I Love You,283626,False,2002,62,0.853,0.811,6,-4.927,0,0.0646,0.0439,0.000307,0.0703,0.9,114.964,pop +Mary J. Blige,Rainy Dayz,276373,False,2001,32,0.412,0.667,6,-6.997,1,0.229,0.195,0.0,0.305,0.533,68.942,"pop, R&B" +A1,Caught in the Middle,206466,False,2001,53,0.519,0.874,5,-5.122,1,0.034,0.0524,0.0,0.243,0.572,96.072,pop +Eminem,Cleanin' Out My Closet,297840,True,2002,71,0.908,0.758,9,-4.753,0,0.174,0.0687,0.0,0.0783,0.87,148.015,hip hop +Ashanti,Happy,262226,True,2002,61,0.727,0.515,2,-6.553,0,0.0289,0.241,3.74e-06,0.124,0.695,89.488,"hip hop, pop, R&B" +Delta Goodrem,Born to Try,251280,False,2003,54,0.542,0.589,8,-6.813,1,0.0284,0.246,0.0,0.188,0.336,133.838,pop +P!nk,Get the Party Started,192533,False,2001,68,0.802,0.903,11,-3.267,0,0.046,0.0011,0.0,0.173,0.96,128.93,pop +Alicia Keys,A Woman's Worth,303333,False,2001,52,0.652,0.41,4,-8.323,0,0.158,0.333,0.0,0.064,0.495,75.09,"pop, R&B" +Daniel Bedingfield,Gotta Get Thru This - D'N'D Radio Edit,161240,False,2002,56,0.838,0.764,7,-5.076,0,0.0586,0.0862,0.000122,0.0906,0.924,133.592,pop +Coldplay,Clocks,307879,False,2002,79,0.577,0.749,5,-7.215,0,0.0279,0.599,0.0115,0.183,0.255,130.97,"rock, pop" +Missy Elliott,Work It,263226,True,2002,70,0.884,0.677,1,-5.603,1,0.283,0.0778,0.0,0.0732,0.584,101.868,"hip hop, pop, R&B" +Ronan Keating,If Tomorrow Never 
Comes,214306,False,2002,60,0.555,0.496,10,-6.136,1,0.028,0.333,0.0,0.0956,0.27,79.01,"pop, rock" +Big Tymers,Still Fly,335613,True,2002,64,0.675,0.607,11,-3.538,1,0.162,0.0302,8.97e-06,0.339,0.56,177.928,"hip hop, pop" +Creed,My Sacrifice,294600,False,2001,69,0.32,0.88,2,-6.035,1,0.0504,0.000101,4.31e-06,0.076,0.257,146.349,"pop, rock, metal" +Ja Rule,Always On Time,245133,True,2001,73,0.839,0.706,5,-6.104,0,0.199,0.208,0.0,0.242,0.839,96.673,"hip hop, pop, R&B" +Ashanti,Foolish,227386,True,2002,70,0.665,0.695,0,-5.763,1,0.0532,0.347,0.0,0.106,0.707,90.119,"hip hop, pop, R&B" +Avril Lavigne,Sk8er Boi,204000,False,2002,75,0.487,0.9,0,-4.417,1,0.0482,6.79e-05,0.0,0.358,0.484,149.937,pop +DJ Sammy,Heaven,233600,False,2002,63,0.571,0.953,8,-5.601,1,0.0548,0.0011,0.000606,0.226,0.601,137.965,pop +Britney Spears,"I'm Not a Girl, Not Yet a Woman",231066,False,2001,58,0.534,0.543,3,-6.857,1,0.0245,0.579,0.0,0.112,0.418,78.996,pop +Cam’ron,Oh Boy,204706,True,2002,67,0.754,0.767,6,-5.586,1,0.145,0.0216,0.0,0.172,0.828,83.014,"hip hop, pop" +Red Hot Chili Peppers,By the Way,216933,False,2002,73,0.451,0.97,0,-4.938,1,0.107,0.0264,0.00355,0.102,0.198,122.444,rock +Elvis Presley,A Little Less Conversation - JXL Radio Edit Remix,211506,False,2002,60,0.597,0.97,4,-5.972,0,0.0502,0.000385,0.205,0.133,0.717,114.999,"rock, easy listening" +Atomic Kitten,The Tide Is High - Radio Mix,206093,False,2002,65,0.783,0.649,7,-4.127,1,0.0322,0.0209,0.00482,0.067,0.665,103.997,pop +*NSYNC,Girlfriend,253600,False,2001,50,0.745,0.807,0,-5.191,0,0.0884,0.0887,1.49e-05,0.0283,0.858,93.967,pop +Robbie Williams,Somethin' Stupid,170493,False,2001,64,0.654,0.515,0,-12.185,1,0.0261,0.429,5.62e-06,0.174,0.677,106.191,"pop, rock" +Alanis Morissette,Hands Clean,269400,False,2002,57,0.513,0.82,7,-5.428,1,0.0299,0.00192,2.83e-06,0.504,0.52,99.952,"pop, Folk/Acoustic" +Usher,U Don't Have to Call,269400,True,2001,59,0.793,0.568,1,-4.958,1,0.0459,0.0478,3.57e-05,0.0304,0.806,100.005,"hip hop, pop, R&B" 
+Creed,One Last Breath - Radio Version,242000,False,2015,47,0.38,0.725,2,-6.094,1,0.0365,0.00571,0.00015,0.3,0.202,130.581,"pop, rock, metal" +The Goo Goo Dolls,Here Is Gone,238173,False,2002,56,0.338,0.685,1,-5.178,1,0.0322,0.00052,0.000512,0.109,0.221,102.192,"pop, rock" +Puddle Of Mudd,She Hates Me,216760,True,2001,70,0.58,0.748,4,-5.433,1,0.0323,0.00935,0.0,0.753,0.584,109.781,"rock, metal" +Ludacris,Rollout (My Business),296586,True,2001,60,0.921,0.668,7,-8.73,1,0.235,0.0949,0.0,0.592,0.892,131.059,"hip hop, pop" +Blazin' Squad,Crossroads - Radio Edit,188693,False,2012,40,0.661,0.746,4,-5.153,1,0.042,0.109,0.0,0.325,0.502,144.188,set() +Default,Wasting My Time,268693,False,2001,40,0.443,0.769,2,-5.529,1,0.0312,0.00138,0.0,0.0677,0.17,147.973,"rock, pop, metal" +Michelle Branch,All You Wanted,217680,False,2001,60,0.499,0.72,8,-9.101,1,0.0286,0.00466,0.00303,0.122,0.652,96.103,"pop, Folk/Acoustic" +Daniel Bedingfield,If You're Not The One,257026,False,2002,58,0.688,0.538,10,-7.608,1,0.0292,0.504,0.0,0.111,0.27,119.998,pop +Enrique Iglesias,Escape,208626,False,2001,56,0.776,0.844,11,-5.305,1,0.0297,0.0277,0.000244,0.135,0.868,125.972,"pop, latin" +Puddle Of Mudd,Blurry,303920,False,2001,64,0.432,0.936,3,-4.537,0,0.0562,0.00691,0.0,0.14,0.499,157.469,"rock, metal" +Coldplay,In My Place,226680,False,2002,72,0.424,0.588,9,-5.455,1,0.0278,0.0553,4.95e-06,0.298,0.193,144.636,"rock, pop" +Outkast,The Whole World (feat. 
Killer Mike),295346,True,2001,51,0.814,0.852,2,-6.176,1,0.401,0.0958,1.32e-06,0.0288,0.717,184.682,"hip hop, pop" +P!nk,Don't Let Me Get Me - Radio Edit,210693,False,2001,60,0.624,0.85,0,-4.754,0,0.0756,0.00234,0.0,0.0621,0.615,98.525,pop +50 Cent,In Da Club,193466,True,2003,81,0.899,0.713,6,-2.752,0,0.366,0.255,0.0,0.0708,0.777,90.051,"hip hop, pop" +Sean Paul,Get Busy,211666,False,2002,74,0.735,0.824,10,-4.143,0,0.036,0.615,0.0,0.158,0.726,100.202,"hip hop, pop" +Eminem,"Lose Yourself - From ""8 Mile"" Soundtrack",322226,True,2004,77,0.686,0.735,2,-4.616,1,0.264,0.00921,0.00066,0.342,0.0596,171.355,hip hop +Beyoncé,Crazy In Love (feat. Jay-Z),236133,False,2003,76,0.646,0.77,2,-6.596,0,0.226,0.00249,0.0,0.0715,0.681,99.165,"pop, R&B" +Counting Crows,Big Yellow Taxi,225426,False,2003,50,0.669,0.873,8,-4.315,1,0.0494,0.00204,0.0,0.137,0.827,88.029,"rock, pop" +Black Eyed Peas,Where Is The Love?,272533,False,2003,68,0.835,0.687,5,-3.18,1,0.184,0.101,0.0,0.132,0.828,94.059,"hip hop, pop" +Jennifer Lopez,Jenny from the Block (feat. Jadakiss & Styles P.) - Track Masters Remix,187840,False,2002,70,0.845,0.768,6,-5.448,1,0.188,0.00733,5.04e-06,0.0575,0.96,100.0,"hip hop, pop, R&B" +Black Eyed Peas,Shut Up,296186,False,2003,56,0.81,0.714,2,-3.966,1,0.243,0.0637,0.0,0.237,0.561,112.968,"hip hop, pop" +t.A.T.u.,All The Things She Said,214440,True,2020,39,0.527,0.834,5,-5.767,0,0.0474,0.0411,0.00599,0.105,0.381,179.92,pop +Benny Benassi,Satisfaction (Isak Original Extended) - Benny Benassi Presents The Biz,285570,False,2003,66,0.793,0.698,11,-3.626,1,0.104,0.163,0.145,0.0745,0.339,130.017,"pop, Dance/Electronic" +Busta Rhymes,I Know What You Want (feat. Flipmode Squad),324306,True,2002,68,0.648,0.759,6,-4.315,1,0.306,0.0142,0.0,0.648,0.518,85.996,"hip hop, pop" +Panjabi MC,Mundian to Bach Ke,244666,False,2003,61,0.778,0.879,8,-4.951,0,0.0371,0.399,0.792,0.15,0.942,98.077,"World/Traditional, hip hop" +Mis-Teeq,Scandalous - U.S. 
Radio Edit,238840,False,2004,59,0.734,0.668,0,-3.935,0,0.0349,0.0526,0.0,0.0925,0.651,99.971,"pop, Dance/Electronic" +JAY-Z,03' Bonnie & Clyde,205560,True,2002,71,0.759,0.678,9,-5.148,0,0.314,0.23,0.0,0.15,0.327,89.64,hip hop +50 Cent,P.I.M.P.,249480,True,2003,76,0.712,0.772,10,-3.024,0,0.346,0.0521,4.35e-06,0.0368,0.848,84.722,"hip hop, pop" +Jamelia,Superstar,215480,False,2003,69,0.801,0.645,1,-6.93,1,0.0356,0.0457,0.0,0.0357,0.824,110.01,pop +Linkin Park,Faint,162600,False,2003,72,0.554,0.978,4,-3.554,1,0.131,0.111,0.0,0.0731,0.594,135.095,"rock, metal" +DMX,X Gon' Give It To Ya,217586,True,2007,70,0.761,0.899,10,-3.09,0,0.183,0.0135,0.0,0.0719,0.673,95.027,"hip hop, pop" +Evanescence,Bring Me To Life,235893,False,2003,79,0.331,0.943,4,-3.188,0,0.0698,0.00721,2.06e-06,0.242,0.296,94.612,metal +Sugababes,Hole In The Head,218173,False,2003,57,0.785,0.933,6,-4.629,0,0.0309,0.0303,0.0,0.137,0.962,125.011,"pop, R&B" +Craig David,Rise & Fall (feat. Sting),287226,False,2002,59,0.405,0.589,6,-7.51,0,0.0586,0.252,0.0,0.661,0.808,83.373,"hip hop, pop, R&B" +Three Days Grace,I Hate Everything About You,231480,False,2003,72,0.498,0.83,6,-5.157,0,0.0421,0.00461,0.0,0.139,0.453,89.342,"rock, metal" +Lil Jon & The East Side Boyz,Get Low,324600,False,2002,47,0.78,0.604,1,-8.019,1,0.0396,0.022,0.0,0.194,0.12,101.043,"hip hop, pop" +B2K,"Bump, Bump, Bump (feat. P. Diddy)",282773,False,2003,62,0.835,0.68,1,-6.02,0,0.21,0.1,0.0,0.049,0.889,95.508,"hip hop, pop, R&B" +Baby Bash,Suga Suga,239026,False,2003,73,0.662,0.748,5,-3.041,0,0.268,0.688,8.43e-06,0.0841,0.535,82.331,"hip hop, pop, R&B, latin" +Aaliyah,Miss You - Main,245240,False,2002,36,0.602,0.34,10,-9.867,1,0.0375,0.618,0.0,0.234,0.511,109.598,"hip hop, pop, R&B" +Britney Spears,Me Against the Music (feat. 
Madonna) - LP Version / Video Mix,223773,False,2003,59,0.804,0.836,6,-6.635,0,0.089,0.32,0.0,0.213,0.85,120.046,pop +Blue,Sorry Seems To Be The Hardest Word,210066,False,2004,57,0.564,0.6,7,-6.814,0,0.0305,0.183,2.19e-06,0.38,0.442,78.955,pop +Delta Goodrem,Lost Without You,248546,False,2003,54,0.581,0.747,7,-6.682,1,0.0283,0.0365,1.69e-06,0.193,0.514,146.301,pop +Justin Timberlake,Rock Your Body,267266,False,2002,76,0.892,0.714,4,-6.055,0,0.141,0.201,0.000234,0.0521,0.817,100.972,pop +The White Stripes,Seven Nation Army,231920,False,2003,3,0.741,0.469,4,-7.627,0,0.0805,0.00601,0.447,0.306,0.313,123.904,"rock, blues" +Youngbloodz,Damn! (feat. Lil' Jon) - Club Mix,298600,True,2003,54,0.772,0.724,6,-5.799,0,0.307,0.0149,0.0,0.355,0.518,84.038,"hip hop, pop" +Beyoncé,Baby Boy (feat. Sean Paul),244826,False,2003,63,0.655,0.488,1,-9.17,1,0.22,0.0825,1.16e-06,0.221,0.791,91.025,"pop, R&B" +Lil' Kim,Magic Stick,359973,True,2019,47,0.849,0.498,2,-7.872,1,0.272,0.116,4.49e-05,0.268,0.502,92.98,"hip hop, pop, R&B" +Christina Aguilera,Can't Hold Us Down (feat. 
Lil' Kim),255266,False,2002,61,0.859,0.658,8,-4.481,1,0.192,0.0326,0.00181,0.0651,0.538,98.989,pop +Nickelback,Someday,207466,False,2003,68,0.455,0.858,11,-5.659,0,0.0391,0.000616,0.000247,0.226,0.597,163.118,"rock, metal" +Blue,U Make Me Wanna - Radio Edit,222400,False,2003,48,0.6,0.736,7,-6.228,1,0.0329,0.325,0.0,0.137,0.848,158.108,pop +Ashanti,Rain On Me,297960,False,2003,50,0.606,0.699,8,-3.103,0,0.0337,0.343,1.89e-06,0.0757,0.509,83.701,"hip hop, pop, R&B" +Trapt,Headstrong,285569,True,2002,72,0.492,0.896,4,-5.819,0,0.117,0.000172,2.78e-06,0.189,0.534,184.981,"rock, metal" +Simply Red,Sunrise,199040,False,2008,58,0.791,0.733,5,-5.123,0,0.029,0.00189,0.201,0.0859,0.917,105.987,"rock, R&B, Folk/Acoustic, pop" +Lumidee,"Never Leave You (Uh Oooh, Uh Oooh)",184906,False,2003,61,0.811,0.657,6,-6.197,1,0.362,0.0391,0.712,0.0798,0.777,199.958,"pop, R&B" +Girls Aloud,Sound Of The Underground,221426,False,2003,58,0.568,0.867,11,-5.861,1,0.0365,0.00344,4.67e-05,0.328,0.88,163.944,"pop, Dance/Electronic" +Nelly,Shake Ya Tailfeather (feat. P. Diddy & Murphy Lee) - 2016 Remaster,293666,False,2016,52,0.527,0.808,1,-4.749,1,0.295,0.176,0.0,0.169,0.907,87.025,"hip hop, pop, R&B" +Room 5,Make Luv,212413,False,2003,57,0.883,0.887,1,-4.92,1,0.116,0.0111,0.63,0.0367,0.648,124.814,Dance/Electronic +Fatman Scoop,Be Faithful,164506,True,2009,49,0.649,0.713,7,-6.488,1,0.295,0.000787,0.0,0.318,0.629,101.129,hip hop +Limp Bizkit,Behind Blue Eyes,269973,False,2003,70,0.595,0.489,7,-6.66,1,0.0271,0.497,0.0,0.119,0.0902,120.315,metal +Sugababes,Too Lost In You,237693,False,2003,58,0.586,0.705,1,-4.904,0,0.0287,0.0485,0.0,0.15,0.459,98.014,"pop, R&B" +50 Cent,"Wanksta - From ""8 Mile"" Soundtrack",219400,True,2003,57,0.802,0.863,1,-3.552,1,0.2,0.0199,0.0,0.141,0.873,81.008,"hip hop, pop" +Pharrell Williams,Frontin' (feat. 
Jay-Z) - Club Mix,236506,True,2003,67,0.894,0.404,9,-5.913,0,0.13,0.453,1.57e-06,0.0801,0.759,102.009,"hip hop, pop" +Chingy,Right Thurr,250746,True,2003,69,0.866,0.749,2,-3.977,1,0.253,0.15,0.0,0.0614,0.891,92.074,"hip hop, pop, R&B" +Eminem,Sing For The Moment,339546,True,2002,71,0.668,0.824,5,-4.164,0,0.137,0.0017,3.05e-06,0.0426,0.185,163.961,hip hop +Lil' Kim,The Jump Off (feat. Mr. Cheeks) - Remix,234800,True,2003,48,0.853,0.752,1,-8.831,1,0.339,0.178,4.65e-06,0.0553,0.935,104.946,"hip hop, pop, R&B" +Mýa,My Love Is Like...Wo - Main Mix,209533,False,2003,48,0.831,0.695,4,-6.491,1,0.109,0.194,1.24e-05,0.309,0.722,132.817,"pop, R&B" +50 Cent,21 Questions,224440,True,2003,72,0.646,0.813,6,-3.846,0,0.299,0.349,9.37e-05,0.0427,0.895,92.729,"hip hop, pop" +Nelly,Air Force Ones,304000,True,2002,61,0.784,0.459,4,-9.74,0,0.317,0.0847,0.0,0.079,0.618,164.062,"hip hop, pop, R&B" +Missy Elliott,Gossip Folks (feat. Ludacris),234893,True,2002,57,0.707,0.538,0,-5.264,1,0.505,0.362,0.0,0.317,0.439,121.732,"hip hop, pop, R&B" +Girls Aloud,Jump,220360,False,2003,60,0.658,0.826,8,-6.031,1,0.0346,0.00189,0.00275,0.0492,0.795,134.465,"pop, Dance/Electronic" +Ultrabeat,Pretty Green Eyes - Radio Edit,201920,False,2003,59,0.553,0.775,1,-10.284,0,0.043,0.212,0.0,0.0728,0.51,139.993,"pop, Dance/Electronic" +Christina Aguilera,Fighter,245960,False,2002,67,0.435,0.92,4,-1.357,0,0.201,0.235,0.000353,0.552,0.45,188.899,pop +Frankie J,Don't Wanna Try,245293,False,2003,49,0.655,0.433,5,-7.577,0,0.0348,0.444,0.0,0.221,0.199,130.127,"hip hop, pop, R&B" +Snoop Dogg,Beautiful,299146,True,2005,67,0.893,0.74,11,-4.936,0,0.132,0.299,0.0,0.0881,0.963,101.025,"hip hop, pop" +Floetry,Say Yes,268373,False,2002,57,0.747,0.398,5,-12.932,0,0.0743,0.211,1.42e-06,0.113,0.428,117.901,"hip hop, pop, R&B" +Ginuwine,In Those Jeans,243306,True,2003,59,0.691,0.541,1,-5.873,1,0.0776,0.508,0.0,0.0753,0.319,127.683,"hip hop, pop, R&B" +Electric Six,Danger! 
High Voltage - Soulchild Radio Mix,214600,False,2003,0,0.66,0.698,11,-4.722,0,0.0302,1.92e-05,0.166,0.0517,0.511,123.005,rock +Rachel Stevens,Sweet Dreams My LA Ex - Radio Edit,208386,False,2003,38,0.85,0.862,2,-3.587,1,0.138,0.0733,0.00144,0.0936,0.908,130.036,"pop, Dance/Electronic" +Ludacris,Stand Up,213760,True,2003,57,0.751,0.84,6,-4.855,1,0.349,0.367,0.0,0.0916,0.801,99.057,"hip hop, pop" +Nas,I Can,253720,False,2002,58,0.837,0.885,6,-3.914,0,0.182,0.103,0.0,0.0666,0.694,95.313,hip hop +3 Doors Down,When I'm Gone,260333,False,2002,67,0.53,0.768,7,-5.611,1,0.0284,0.00385,0.0,0.103,0.374,148.095,"pop, rock, metal" +Tyrese,How You Gonna Act Like That,294693,False,2002,60,0.733,0.521,3,-3.657,0,0.0457,0.107,0.0,0.0692,0.525,112.913,"hip hop, pop, R&B" +Justin Timberlake,Cry Me a River,288333,False,2002,73,0.624,0.654,8,-6.582,0,0.183,0.577,0.0,0.104,0.564,73.898,pop +Westlife,Mandy,199320,False,2003,57,0.447,0.636,5,-5.08,1,0.0278,0.254,9.25e-05,0.11,0.376,105.678,pop +No Doubt,It's My Life,226053,False,2003,60,0.612,0.735,8,-5.074,0,0.0282,0.00202,0.00118,0.328,0.783,126.326,"rock, pop" +Amanda Perez,Angel,218760,False,2003,48,0.638,0.54,0,-6.849,1,0.0473,0.506,0.0,0.157,0.352,143.772,"pop, R&B" +JAY-Z,Excuse Me Miss,281240,True,2002,56,0.714,0.862,6,-5.531,1,0.286,0.0305,0.0,0.0884,0.887,92.849,hip hop +Coldplay,Clocks,307879,False,2002,79,0.577,0.749,5,-7.215,0,0.0279,0.599,0.0115,0.183,0.255,130.97,"rock, pop" +Kelly Rowland,Stole,249293,False,2002,54,0.649,0.718,7,-4.984,0,0.0594,0.00676,4.26e-06,0.174,0.477,79.993,"hip hop, pop, R&B" +Missy Elliott,Work It,263226,True,2002,70,0.884,0.677,1,-5.603,1,0.283,0.0778,0.0,0.0732,0.584,101.868,"hip hop, pop, R&B" +Ashanti,Rock Wit U (Awww Baby),209120,False,2003,65,0.71,0.797,4,-3.006,0,0.0582,0.408,0.0,0.16,0.849,94.998,"hip hop, pop, R&B" +Junior Senior,Move Your Feet,181826,False,2002,67,0.747,0.904,9,-2.623,1,0.0803,0.046,0.106,0.203,0.846,118.877,pop +Audioslave,Like a 
Stone,293960,False,2002,75,0.614,0.568,7,-5.477,0,0.0276,0.00797,0.0,0.0997,0.516,107.849,"rock, metal" +Sean Paul,Like Glue,232506,False,2002,61,0.757,0.78,1,-5.038,1,0.319,0.0811,0.0,0.113,0.59,97.917,"hip hop, pop" +Jaheim,Put That Woman First,245773,True,2002,55,0.649,0.536,5,-8.804,0,0.132,0.492,0.0,0.184,0.513,78.935,"pop, R&B" +Jennifer Lopez,All I Have (feat. LL Cool J),254466,False,2002,60,0.701,0.669,1,-5.265,1,0.107,0.271,0.0,0.158,0.446,83.066,"hip hop, pop, R&B" +Monica,So Gone,242773,False,2003,64,0.618,0.552,8,-6.863,1,0.0981,0.176,0.0,0.252,0.586,81.975,"pop, R&B" +Dido,White Flag,240040,False,2003,44,0.512,0.525,5,-6.823,1,0.0401,0.327,3.3e-06,0.081,0.294,169.951,pop +The Roots,The Seed (2.0),267933,True,2002,63,0.758,0.957,7,-2.281,1,0.0357,0.039,0.0,0.133,0.966,111.121,hip hop +Chingy,Holidae In,314400,True,2003,61,0.81,0.791,7,-5.909,1,0.181,0.0893,0.0,0.0838,0.948,153.067,"hip hop, pop, R&B" +Fabolous,Can't Let You Go (feat. Mike Shorey & Lil' Mo),223973,True,2003,61,0.646,0.6,9,-6.569,1,0.458,0.231,0.0,0.0794,0.811,192.082,"hip hop, pop, R&B" +Kelly Clarkson,Miss Independent,214773,False,2003,47,0.656,0.615,11,-6.359,0,0.137,0.0769,0.000415,0.0706,0.592,175.943,"pop, R&B" +Ja Rule,Mesmerize,278693,True,2002,61,0.769,0.646,1,-6.653,1,0.199,0.0566,0.0,0.406,0.433,90.079,"hip hop, pop, R&B" +Matchbox Twenty,Unwell,228706,False,2002,63,0.256,0.788,6,-5.263,0,0.0403,0.0298,0.0,0.707,0.429,80.718,"pop, rock" +Outkast,The Way You Move (feat. Sleepy Brown),234000,True,2003,66,0.871,0.597,5,-4.932,0,0.0464,0.126,0.000113,0.0638,0.635,125.999,"hip hop, pop" +Fabolous,Into You (feat. 
Tamia),295773,True,2003,67,0.546,0.538,7,-7.886,0,0.0523,0.233,0.0,0.118,0.58,182.12,"hip hop, pop, R&B" +Switchfoot,Meant to Live,201373,False,2003,61,0.397,0.903,2,-4.577,1,0.0457,0.00575,1.6e-06,0.362,0.556,151.551,"rock, pop" +Daniel Bedingfield,If You're Not The One,257026,False,2002,58,0.688,0.538,10,-7.608,1,0.0292,0.504,0.0,0.111,0.27,119.998,pop +Good Charlotte,Lifestyles of the Rich & Famous,190173,False,2002,68,0.62,0.93,1,-3.685,1,0.0374,0.00043,0.0,0.0686,0.609,106.22,"rock, metal" +Wayne Wonder,No Letting Go,202013,False,2003,63,0.496,0.819,5,-5.656,0,0.0444,0.0479,0.000215,0.177,0.317,100.215,"hip hop, R&B" +The Strokes,Reptilia,219826,False,2003,74,0.489,0.649,9,-5.11,1,0.0336,0.000603,0.713,0.101,0.77,158.009,rock +Avril Lavigne,I'm with You,223066,False,2002,70,0.457,0.406,9,-7.462,1,0.0291,0.08,0.0,0.117,0.208,151.95,pop +Will Young,Leave Right Now,214733,False,2003,55,0.641,0.445,6,-8.674,1,0.0368,0.145,0.0,0.108,0.383,81.931,pop +Usher,Yeah! (feat. Lil Jon & Ludacris),250373,False,2004,81,0.894,0.791,2,-4.699,1,0.112,0.0183,0.0,0.0388,0.583,105.018,"hip hop, pop, R&B" +Maroon 5,This Love,206200,False,2002,77,0.712,0.862,5,-4.612,0,0.0378,0.0525,0.0,0.093,0.809,95.051,pop +Britney Spears,Toxic,198800,False,2003,81,0.774,0.838,5,-3.914,0,0.114,0.0249,0.025,0.242,0.924,143.04,pop +Outkast,Hey Ya!,235213,False,2003,80,0.727,0.974,4,-2.261,0,0.0664,0.103,0.000532,0.174,0.965,79.526,"hip hop, pop" +Hoobastank,The Reason,232800,False,2003,79,0.472,0.671,4,-4.649,1,0.029,0.0129,0.0,0.159,0.0681,82.952,"pop, rock, metal" +Anastacia,Left Outside Alone,257426,False,2004,65,0.663,0.746,2,-3.567,0,0.0321,0.0697,0.0,0.0929,0.325,102.847,pop +Daddy Yankee,Gasolina,192600,False,2004,11,0.857,0.801,0,-6.499,1,0.0618,0.332,1.2e-06,0.0789,0.753,96.009,latin +Black Eyed Peas,Let's Get It Started - Spike Mix,217733,False,2004,53,0.785,0.799,11,-2.208,0,0.126,0.117,0.0,0.292,0.797,104.923,"hip hop, pop" +O-Zone,Dragostea Din 
Tei,215431,False,2004,56,0.809,0.965,0,-3.947,1,0.0427,0.177,0.0,0.0577,0.672,130.103,pop +Terror Squad,Lean Back,247426,True,2004,67,0.783,0.916,1,-3.344,1,0.415,0.11,0.0,0.0746,0.695,95.321,hip hop +Anastacia,Sick and Tired,212266,False,1999,56,0.598,0.868,0,-4.552,1,0.0373,0.00908,0.0,0.279,0.47,99.848,pop +Gwen Stefani,What You Waiting For?,221226,False,2004,60,0.676,0.948,5,-2.557,1,0.0628,0.0509,8.19e-06,0.384,0.731,136.027,"pop, R&B" +Ashanti,Only U - No Intro,186306,False,2004,56,0.601,0.621,6,-5.576,0,0.0434,0.0374,1.75e-06,0.188,0.278,94.547,"hip hop, pop, R&B" +The Rasmus,In the Shadows,257920,False,2003,68,0.6,0.796,2,-4.481,1,0.0275,0.000109,5.2e-05,0.484,0.75,105.991,metal +Houston,I Like That,236520,False,2004,59,0.797,0.502,10,-3.925,0,0.0968,0.0154,0.0,0.1,0.385,106.997,"hip hop, pop" +Beyoncé,Naughty Girl,208600,False,2003,63,0.735,0.466,6,-8.65,0,0.0857,0.239,6.24e-06,0.102,0.643,99.973,"pop, R&B" +Linkin Park,Numb,185586,False,2003,81,0.496,0.863,9,-4.153,1,0.0381,0.0046,0.0,0.639,0.243,110.018,"rock, metal" +Lloyd Banks,On Fire,187280,True,2004,61,0.759,0.825,8,-4.289,1,0.22,0.0923,0.00057,0.24,0.429,94.857,"hip hop, pop" +Akon,Locked Up,235066,True,2004,16,0.818,0.579,7,-4.475,1,0.101,0.023,0.0,0.107,0.354,89.987,pop +D12,My Band,298773,True,2004,73,0.851,0.849,1,-3.383,0,0.0828,0.497,2.05e-06,0.116,0.844,120.014,"hip hop, pop, rock" +Jay Sean,Eyes On You - Radio Mix,190493,False,2004,42,0.802,0.672,7,-4.971,1,0.0847,0.0775,0.0,0.136,0.619,96.525,"hip hop, pop, R&B" +Mario Winans,I Don't Wanna Know (feat. Enya & P. Diddy) - 2016 Remaster,257333,False,2016,66,0.833,0.515,11,-5.0,0,0.0462,0.347,0.00156,0.116,0.4,97.007,pop +Destiny's Child,Lose My Breath,242013,False,2004,61,0.814,0.899,1,-5.958,1,0.0637,0.00727,0.219,0.0979,0.545,119.011,"pop, R&B" +Eric Prydz,Call on Me - Radio Mix,171360,False,2004,72,0.597,0.837,10,-6.518,1,0.375,0.00427,0.00115,0.839,0.447,126.342,"pop, Dance/Electronic" +Ciara,Goodies (feat. 
Petey Pablo),223000,False,2004,67,0.826,0.647,5,-6.306,0,0.0491,0.0277,1.52e-06,0.298,0.872,102.017,"pop, R&B" +Kelis,Trick Me,206106,True,2003,63,0.97,0.72,1,-3.347,0,0.149,0.0369,0.000389,0.326,0.962,107.17,"pop, R&B, Dance/Electronic" +LL Cool J,Headsprung,267320,False,2004,56,0.718,0.786,0,-5.76,1,0.0284,0.0114,2.5e-05,0.0745,0.465,99.916,"hip hop, pop, R&B" +Spiderbait,Black Betty - Edit,205973,False,2004,63,0.562,0.865,7,-6.476,0,0.0922,2.06e-05,0.591,0.219,0.35,124.047,rock +George Michael,Amazing,265826,False,2004,57,0.805,0.754,10,-6.825,0,0.0394,0.0884,1.77e-06,0.117,0.88,128.429,set() +Britney Spears,My Prerogative,213893,False,2004,53,0.749,0.938,10,-4.423,0,0.118,0.0127,1.96e-06,0.103,0.619,111.014,pop +Nina Sky,Move Ya Body,232000,False,2004,67,0.87,0.712,11,-6.313,0,0.0432,0.0202,0.000331,0.0576,0.877,121.057,"pop, R&B" +Baby Bash,Suga Suga,239026,False,2003,73,0.662,0.748,5,-3.041,0,0.268,0.688,8.43e-06,0.0841,0.535,82.331,"hip hop, pop, R&B, latin" +Petey Pablo,Freek-A-Leek,235186,True,2003,56,0.737,0.697,2,-3.716,1,0.254,0.0756,0.0,0.359,0.662,104.917,"hip hop, pop" +JAY-Z,Dirt Off Your Shoulder,245173,True,2003,53,0.779,0.655,1,-7.147,1,0.318,0.0319,2.63e-06,0.339,0.645,163.971,hip hop +Juanes,La Camisa Negra,216706,False,2004,70,0.751,0.731,6,-4.419,0,0.0308,0.0838,0.0,0.0556,0.973,97.007,"pop, latin" +Avril Lavigne,Nobody's Home,212413,False,2004,61,0.348,0.907,5,-3.66,0,0.0497,0.000516,0.0,0.161,0.177,185.406,pop +Robbie Williams,Radio,233600,False,2004,33,0.607,0.874,5,-5.409,0,0.0349,0.00331,0.0494,0.26,0.846,130.015,"pop, rock" +Black Eyed Peas,Hey Mama,214893,True,2003,49,0.86,0.866,4,-6.345,0,0.232,0.104,1.42e-06,0.584,0.933,100.15,"hip hop, pop" +Ying Yang Twins,Salt Shaker,252440,True,2003,61,0.884,0.697,11,-6.022,1,0.128,0.0509,0.0,0.094,0.322,101.965,"hip hop, pop" +Enrique Iglesias,Not In Love - Radio Mix,223133,False,2003,37,0.762,0.885,0,-5.096,0,0.0395,0.0371,0.0,0.0519,0.886,117.021,"pop, latin" 
+Kelis,Milkshake,182626,False,2003,68,0.881,0.774,1,-6.068,1,0.0439,0.00986,0.0346,0.206,0.759,112.968,"pop, R&B, Dance/Electronic" +Linkin Park,Breaking the Habit,196906,False,2003,68,0.579,0.849,4,-5.218,0,0.0303,0.108,0.0,0.0909,0.581,100.021,"rock, metal" +JoJo,Leave (Get Out) - Radio Edit,242746,False,2005,49,0.656,0.513,5,-8.691,1,0.253,0.156,6.45e-05,0.0763,0.464,86.891,"hip hop, pop, R&B" +Christina Milian,Dip It Low,197186,False,2004,45,0.752,0.722,11,-4.207,0,0.104,0.0108,0.000536,0.0753,0.612,169.131,"hip hop, pop, R&B" +Avant,Read Your Mind,263506,False,2003,56,0.755,0.381,8,-6.696,0,0.0711,0.291,5.13e-06,0.0974,0.695,73.108,"pop, R&B" +Eminem,Just Lose It,248680,True,2004,67,0.94,0.633,8,-3.56,1,0.0467,0.0581,4.04e-05,0.281,0.962,121.003,hip hop +Sugababes,Too Lost In You,237693,False,2003,58,0.586,0.705,1,-4.904,0,0.0287,0.0485,0.0,0.15,0.459,98.014,"pop, R&B" +Snoop Dogg,Drop It Like It's Hot,266066,True,2004,39,0.892,0.628,1,-3.832,1,0.216,0.169,0.0,0.102,0.676,92.063,"hip hop, pop" +Sean Paul,I'm Still in Love with You (feat. 
Sasha),273360,False,2002,68,0.765,0.666,7,-5.384,1,0.172,0.102,0.0,0.116,0.756,87.002,"hip hop, pop" +The Shapeshifters,Lola's Theme - Radio Edit,207066,False,2004,65,0.748,0.845,5,-4.612,0,0.0536,0.00083,0.000225,0.068,0.659,123.925,Dance/Electronic +D12,How Come,249533,True,2004,57,0.745,0.858,9,-2.221,1,0.237,0.28,0.0,0.188,0.547,89.983,"hip hop, pop, rock" +Basement Jaxx,Good Luck,282306,False,2003,0,0.571,0.968,5,-3.092,1,0.162,0.0649,3.07e-06,0.592,0.313,154.07,"pop, Dance/Electronic" +Kevin Lyttle,Turn Me On,192106,False,2004,67,0.677,0.682,9,-6.879,1,0.0361,0.0405,0.0,0.0351,0.875,106.279,hip hop +Chingy,One Call Away,276800,False,2003,61,0.765,0.821,4,-5.926,0,0.183,0.0918,0.0,0.217,0.962,162.519,"hip hop, pop, R&B" +T.I.,Bring Em Out,216706,True,2004,63,0.759,0.891,11,-2.983,1,0.257,0.0298,0.0,0.141,0.587,98.579,"hip hop, pop" +DJ Casper,Cha Cha Slide - Hardino Mix,222146,False,2004,51,0.853,0.911,11,-6.722,0,0.125,0.0436,0.00571,0.287,0.802,131.012,set() +Kylie Minogue,I Believe in You,200973,False,2004,47,0.548,0.785,7,-5.087,1,0.0704,0.00277,0.048,0.329,0.418,120.939,"pop, Dance/Electronic" +Twista,Overnight Celebrity,233360,True,2004,66,0.828,0.792,10,-5.435,0,0.179,0.182,0.0,0.281,0.777,134.199,"hip hop, pop, R&B" +N.E.R.D,She Wants To Move,213786,False,2004,57,0.766,0.851,5,-4.831,1,0.0786,0.00402,0.000744,0.256,0.8,115.012,hip hop +Britney Spears,Everytime,230306,False,2003,63,0.398,0.284,3,-12.852,1,0.0337,0.966,8.57e-05,0.116,0.114,109.599,pop +U2,Vertigo,193520,False,2004,64,0.416,0.819,9,-3.974,1,0.0613,0.000138,0.00108,0.147,0.632,140.083,"World/Traditional, rock" +Franz Ferdinand,Take Me Out,237026,False,2004,77,0.277,0.663,4,-8.821,0,0.0377,0.000409,0.00051,0.136,0.527,104.561,rock +J-Kwon,Tipsy - Club Mix,247106,True,2001,59,0.925,0.741,9,-5.827,0,0.315,0.0653,0.0,0.044,0.654,93.04,"hip hop, pop" +G-Unit,Wanna Get To Know You,265026,True,2003,61,0.513,0.826,10,-3.651,0,0.132,0.106,0.0,0.0675,0.889,76.91,"hip hop, pop" +Lil' Flip,Sunshine 
(feat. Lea),225173,True,2004,42,0.814,0.387,0,-9.867,1,0.0946,0.0248,0.0,0.131,0.792,93.961,"hip hop, pop" +Kanye West,Jesus Walks,193733,True,2004,73,0.637,0.834,3,-4.686,0,0.323,0.614,0.0,0.317,0.715,87.312,hip hop +Evanescence,My Immortal,262533,False,2003,72,0.19,0.265,9,-9.206,1,0.0356,0.863,0.0,0.134,0.101,79.012,metal +Nelly,My Place,336506,True,2004,57,0.6,0.641,1,-4.237,0,0.0423,0.0498,1.49e-06,0.0982,0.642,163.153,"hip hop, pop, R&B" +Lemar,If There's Any Justice,229080,False,2004,46,0.706,0.665,6,-5.348,0,0.0453,0.0128,0.0,0.338,0.639,89.993,R&B +No Doubt,It's My Life,226053,False,2003,60,0.612,0.735,8,-5.074,0,0.0282,0.00202,0.00118,0.328,0.783,126.326,"rock, pop" +Avril Lavigne,Don't Tell Me,202013,False,2004,58,0.523,0.795,4,-2.92,1,0.0386,0.00462,0.0,0.358,0.484,144.106,pop +Twista,Slow Jamz,316053,True,2004,72,0.559,0.733,2,-5.659,0,0.171,0.314,0.0,0.284,0.611,145.115,"hip hop, pop, R&B" +Green Day,American Idiot,176346,True,2004,77,0.38,0.988,1,-2.042,1,0.0639,2.64e-05,7.86e-05,0.368,0.769,186.113,rock +Kelis,Millionaire,224933,False,2003,57,0.68,0.847,11,-6.636,1,0.108,0.0217,0.0,0.0374,0.75,176.051,"pop, R&B, Dance/Electronic" +Scissor Sisters,Comfortably Numb,266040,False,2004,43,0.803,0.924,7,-6.466,1,0.0391,0.237,0.0167,0.104,0.788,116.972,"pop, rock, Dance/Electronic" +Alicia Keys,Diary (feat. Tony! Toni! Tone! 
& Jermaine Paul),284160,False,2003,55,0.663,0.465,8,-9.711,0,0.0317,0.555,0.00183,0.125,0.279,127.932,"pop, R&B" +Kanye West,Through The Wire,221226,True,2004,76,0.571,0.739,7,-6.11,1,0.247,0.00865,5.21e-06,0.158,0.66,83.089,hip hop +Special D.,Come With Me - Radio Edit,185133,False,2004,61,0.739,0.999,7,-5.077,1,0.0803,0.13,0.00224,0.28,0.501,139.982,pop +Ludacris,Splash Waterfalls,290760,True,2003,48,0.734,0.846,11,-6.102,1,0.409,0.101,0.0,0.233,0.702,145.894,"hip hop, pop" +Avril Lavigne,My Happy Ending,242413,True,2004,68,0.414,0.936,2,-2.407,1,0.0758,0.00136,0.0,0.369,0.74,170.229,pop +Dido,White Flag,240040,False,2003,44,0.512,0.525,5,-6.823,1,0.0401,0.327,3.3e-06,0.081,0.294,169.951,pop +Eamon,Fuck It (I Don't Want You Back),225106,True,2004,64,0.828,0.653,0,-6.245,0,0.0653,0.214,3.25e-06,0.0404,0.575,68.507,set() +Beyoncé,"Me, Myself and I",301133,False,2003,62,0.75,0.458,1,-9.092,1,0.0803,0.226,3.25e-05,0.137,0.536,83.61,"pop, R&B" +3 Doors Down,Here Without You,238733,False,2002,74,0.557,0.533,10,-6.817,0,0.0252,0.0492,0.0,0.205,0.233,143.994,"pop, rock, metal" +Kanye West,All Falls Down,223506,True,2004,80,0.657,0.734,8,-4.832,0,0.484,0.149,0.0,0.139,0.434,91.03,hip hop +Twista,Sunshine (feat. Anthony Hamilton),226013,True,2004,56,0.926,0.888,11,-5.337,1,0.294,0.261,0.0,0.348,0.819,98.523,"hip hop, pop, R&B" +Katie Melua,The Closest Thing to Crazy,252466,False,2003,55,0.562,0.219,4,-13.2,1,0.0312,0.856,0.000296,0.0979,0.106,127.831,"pop, easy listening, jazz" +Usher,My Boo,223440,False,2004,76,0.662,0.507,5,-8.238,1,0.118,0.257,0.0,0.0465,0.676,86.412,"hip hop, pop, R&B" +Lloyd,Southside,277840,False,2004,59,0.688,0.404,9,-9.481,0,0.0818,0.377,0.0,0.0813,0.448,75.007,"hip hop, pop, R&B" +LMC,Take Me To The Clouds Above - LMC Vs. 
U2 / Radio Edit,171546,False,2004,59,0.668,0.905,6,-5.395,1,0.038,0.00232,0.0331,0.141,0.667,128.658,Dance/Electronic +The Streets,Fit but You Know It,254266,True,2004,58,0.68,0.844,6,-1.729,1,0.299,0.23,0.0,0.0305,0.895,172.302,"hip hop, Dance/Electronic" +JUVENILE,Slow Motion,248200,True,2003,64,0.713,0.734,0,-4.894,0,0.325,0.548,0.0,0.0687,0.89,172.872,"hip hop, pop" +Outkast,The Way You Move (feat. Sleepy Brown),234000,True,2003,66,0.871,0.597,5,-4.932,0,0.0464,0.126,0.000113,0.0638,0.635,125.999,"hip hop, pop" +Girls Aloud,Love Machine,205360,False,2004,59,0.663,0.95,7,-4.21,1,0.0621,0.00127,0.00492,0.0753,0.697,116.02,"pop, Dance/Electronic" +Jessica Simpson,With You,191826,False,2003,57,0.553,0.756,0,-4.583,1,0.108,0.0923,0.0,0.215,0.605,94.487,"pop, R&B" +Eminem,Like Toy Soldiers,296880,True,2004,48,0.52,0.768,8,-3.489,0,0.359,0.0193,0.00034,0.104,0.398,79.178,hip hop +Gary Jules,Mad World (Feat. Michael Andrews),189506,False,2001,65,0.345,0.0581,3,-17.217,1,0.0374,0.976,0.000366,0.103,0.304,174.117,pop +Ashlee Simpson,Pieces Of Me,217440,False,2004,56,0.505,0.799,2,-4.286,1,0.0773,0.0572,0.0,0.11,0.772,174.001,pop +Will Young,Leave Right Now,214733,False,2003,55,0.641,0.445,6,-8.674,1,0.0368,0.145,0.0,0.108,0.383,81.931,pop +Alicia Keys,You Don't Know My Name,366733,False,2003,61,0.264,0.663,9,-8.264,1,0.187,0.59,0.00443,0.342,0.249,167.078,"pop, R&B" +Mariah Carey,We Belong Together,201400,False,2005,69,0.84,0.476,0,-7.918,1,0.0629,0.0264,0.0,0.0865,0.767,139.987,"pop, R&B" +Mario,Let Me Love You,256733,False,2004,72,0.656,0.578,7,-8.97,0,0.0922,0.235,0.0,0.118,0.556,94.514,"pop, R&B" +Kanye West,Gold Digger,207626,True,2005,78,0.629,0.696,1,-5.572,0,0.348,0.0195,0.0,0.0554,0.623,93.034,hip hop +Rihanna,Pon de Replay,246960,False,2005,77,0.779,0.64,7,-8.415,1,0.159,0.000155,0.00077,0.101,0.498,99.019,"hip hop, pop, R&B" +50 Cent,Candy Shop,209106,True,2005,79,0.614,0.574,11,-7.961,1,0.466,0.0253,3.2e-05,0.38,0.755,125.173,"hip hop, pop" +James 
Blunt,You're Beautiful,209493,True,2005,75,0.675,0.479,0,-9.87,0,0.0278,0.633,1.76e-05,0.088,0.454,81.998,pop +Green Day,Boulevard of Broken Dreams,261266,True,2004,72,0.49,0.679,8,-3.68,1,0.0309,0.00394,1.54e-05,0.0383,0.506,167.046,rock +The Pussycat Dolls,Don't Cha,272080,False,2005,72,0.875,0.631,1,-3.475,1,0.099,0.00542,2.54e-06,0.127,0.549,120.003,"pop, R&B" +Daniel Powter,Bad Day,233640,False,2005,74,0.599,0.785,3,-4.013,1,0.0309,0.448,0.00336,0.151,0.52,140.046,pop +Akon,Bananza (Belly Dancer),238493,False,2004,28,0.878,0.699,10,-5.897,0,0.132,0.0345,3.37e-06,0.755,0.666,104.838,pop +Madonna,Hung Up,337733,False,2005,74,0.649,0.647,9,-7.695,0,0.0452,0.0039,0.161,0.0686,0.405,125.02,pop +50 Cent,Just A Lil Bit,237706,True,2005,76,0.489,0.692,1,-6.672,1,0.41,0.0322,0.00608,0.315,0.527,96.946,"hip hop, pop" +Crazy Frog,Axel F,168879,False,2005,67,0.86,0.907,2,-3.321,1,0.0407,0.279,0.693,0.0648,0.786,138.045,pop +Eminem,Ass Like That,265480,True,2004,52,0.646,0.796,2,-6.152,1,0.366,0.409,0.0,0.107,0.676,83.093,hip hop +Sean Paul,We Be Burnin',213066,False,2005,59,0.95,0.803,1,-4.195,1,0.151,0.145,0.0,0.0748,0.86,117.0,"hip hop, pop" +Gwen Stefani,Rich Girl,236213,False,2004,68,0.856,0.754,7,-2.728,0,0.0416,0.0294,1.49e-05,0.206,0.722,98.018,"pop, R&B" +The Chemical Brothers,Galvanize,393813,False,2005,63,0.745,0.714,7,-3.681,1,0.0751,0.0141,0.0222,0.363,0.365,104.003,Dance/Electronic +Fort Minor,Remember the Name (feat. Styles of Beyond),230493,True,2005,73,0.688,0.835,8,-4.162,1,0.0911,0.0583,2.87e-06,0.0795,0.88,84.858,"hip hop, pop, rock" +50 Cent,Disco Inferno,214226,True,2005,72,0.925,0.659,3,-4.763,1,0.231,0.206,7.88e-05,0.279,0.71,97.018,"hip hop, pop" +The Game,How We Do,235533,True,2005,74,0.862,0.648,4,-7.401,0,0.251,0.0455,0.0,0.0332,0.637,98.012,"hip hop, pop" +Chris Brown,Run It! (feat. 
Juelz Santana),229866,False,2005,71,0.846,0.482,1,-6.721,0,0.129,0.0246,0.0,0.393,0.212,100.969,"hip hop, pop, R&B" +Mariah Carey,It's Like That,203360,False,2005,0,0.8,0.633,8,-4.875,0,0.0514,0.0901,0.0,0.0315,0.836,95.953,"pop, R&B" +Black Eyed Peas,My Humps,326960,False,2005,67,0.802,0.682,1,-5.924,0,0.222,0.111,1.39e-05,0.109,0.586,123.95,"hip hop, pop" +Ciara,Oh (feat. Ludacris),256346,False,2004,61,0.8,0.496,7,-7.135,1,0.0506,0.000298,0.0,0.0697,0.357,128.29,"pop, R&B" +50 Cent,Outta Control - Remix,247506,True,2005,60,0.772,0.599,8,-5.996,0,0.246,0.0277,0.0,0.0839,0.557,92.029,"hip hop, pop" +Baby Bash,"Baby, I'm Back",219920,True,2005,4,0.899,0.365,9,-5.461,0,0.105,0.0508,0.0,0.097,0.749,100.01,"hip hop, pop, R&B, latin" +Jennifer Lopez,Get Right,225533,False,2005,65,0.741,0.759,0,-5.096,0,0.12,0.0218,0.0,0.628,0.362,97.084,"hip hop, pop, R&B" +JAY-Z,Numb / Encore,205733,True,2004,76,0.687,0.793,2,-4.254,1,0.166,0.0603,0.0,0.582,0.751,107.045,hip hop +The Game,Hate It Or Love It,206400,True,2005,77,0.802,0.785,9,-4.781,1,0.207,0.14,0.0,0.123,0.435,99.998,"hip hop, pop" +Gorillaz,Feel Good Inc.,222640,False,2005,82,0.818,0.705,6,-6.679,1,0.177,0.00836,0.00233,0.613,0.772,138.559,hip hop +Black Eyed Peas,Don't Phunk With My Heart,239773,False,2005,62,0.69,0.928,5,-2.76,0,0.061,0.00937,0.0,0.547,0.604,130.889,"hip hop, pop" +Akon,Lonely,235800,True,2004,21,0.629,0.532,5,-7.88,0,0.0352,0.331,0.0,0.238,0.619,90.098,pop +Bodyrockers,I Like The Way - Radio Edit,200053,False,2005,54,0.642,0.851,6,-3.638,1,0.0431,0.0248,0.0146,0.084,0.777,127.988,"rock, Dance/Electronic" +Sugababes,Push The Button,218093,False,2005,64,0.962,0.66,8,-5.096,1,0.061,0.0485,0.0,0.076,0.814,126.016,"pop, R&B" +Jeezy,Soul Survivor,280013,True,2005,55,0.684,0.493,10,-7.728,1,0.0459,0.000839,0.0,0.157,0.08,84.03,"hip hop, pop" +Eminem,Mockingbird,250760,True,2004,77,0.637,0.678,0,-3.798,1,0.266,0.209,0.0,0.156,0.254,84.039,hip hop +Shakira,La Tortura (feat. 
Alejandro Sanz),212893,False,2005,72,0.74,0.783,0,-5.367,1,0.0427,0.0297,0.000308,0.123,0.812,100.011,"pop, latin" +Gorillaz,DARE,244999,False,2005,73,0.76,0.891,11,-5.852,0,0.0372,0.0229,0.0869,0.298,0.966,120.264,hip hop +Snoop Dogg,Signs,236813,True,2004,54,0.94,0.713,7,-5.308,1,0.127,0.0319,0.00076,0.325,0.666,112.955,"hip hop, pop" +Robbie Williams,Tripping,276603,False,2005,58,0.666,0.922,2,-4.458,0,0.0429,0.0381,0.000191,0.0633,0.828,118.014,"pop, rock" +David Banner,Play,230133,True,2005,52,0.837,0.658,1,-8.013,0,0.126,0.000213,0.122,0.243,0.586,95.027,"hip hop, pop" +Trillville,Some Cut,283454,True,2004,50,0.9,0.515,1,-6.626,1,0.372,0.00341,0.0,0.111,0.659,84.995,"hip hop, pop" +D4L,Laffy Taffy,224253,True,2005,63,0.891,0.439,7,-7.994,0,0.428,0.0351,0.0,0.0932,0.622,77.499,"hip hop, pop" +Missy Elliott,Lose Control (feat. Ciara & Fat Man Scoop),226863,True,2005,67,0.904,0.813,4,-7.105,0,0.121,0.0311,0.00697,0.0471,0.81,125.461,"hip hop, pop, R&B" +Rob Thomas,Lonely No More,226640,False,2005,56,0.551,0.896,9,-3.152,0,0.109,0.033,0.0,0.0899,0.858,171.79,"pop, Folk/Acoustic" +Brian McFadden,Almost Here,229826,False,2004,42,0.561,0.452,9,-7.324,0,0.0336,0.409,0.0,0.106,0.316,77.984,set() +Bobby V.,"Slow Down - 12"" Version",258666,False,2005,65,0.568,0.732,11,-4.896,0,0.103,0.179,4.92e-06,0.401,0.626,186.048,"hip hop, pop, R&B" +Backstreet Boys,Incomplete,239586,False,2005,63,0.437,0.589,4,-4.834,1,0.0331,0.231,0.0,0.0768,0.165,133.631,pop +Pretty Ricky,Grind With Me,237733,True,2005,63,0.7,0.538,1,-5.952,1,0.045,0.000204,0.0,0.105,0.189,136.518,"hip hop, pop, R&B" +Kanye West,Diamonds From Sierra Leone - Remix,233400,True,2005,61,0.672,0.814,7,-3.768,1,0.27,0.258,0.0,0.164,0.379,97.043,hip hop +Snoop Dogg,Drop It Like It's Hot,266066,True,2004,39,0.892,0.628,1,-3.832,1,0.216,0.169,0.0,0.102,0.676,92.063,"hip hop, pop" +Fat Joe,Get It Poppin' (feat. 
Nelly) - Radio Version,211320,False,2005,41,0.905,0.55,1,-7.558,1,0.372,0.0628,0.0,0.0644,0.52,100.813,"hip hop, pop" +Destiny's Child,Soldier (feat. T.I. & Lil' Wayne),325573,False,2004,63,0.878,0.417,7,-6.799,1,0.361,0.0444,0.0,0.0833,0.904,77.49,"pop, R&B" +Ciara,"1, 2 Step (feat. Missy Elliott) - Main",202213,False,2005,52,0.939,0.498,5,-10.94,0,0.161,0.0444,0.00128,0.0475,0.801,113.053,"pop, R&B" +Amerie,1 Thing,238746,False,2008,52,0.636,0.946,10,-4.683,0,0.332,0.115,3.75e-05,0.0416,0.891,125.085,"hip hop, pop, R&B" +Ludacris,Get Back,270746,True,2004,59,0.555,0.639,1,-5.16,1,0.263,0.0594,0.0,0.118,0.184,171.609,"hip hop, pop" +Franz Ferdinand,Do You Want To,215000,False,2005,61,0.412,0.944,0,-2.896,1,0.0448,0.0724,0.000493,0.801,0.79,123.091,rock +Natalie Imbruglia,Shiver,222653,False,2005,52,0.489,0.798,1,-4.49,1,0.0316,0.00115,2.81e-06,0.149,0.309,94.163,"pop, Folk/Acoustic" +Gwen Stefani,Hollaback Girl,199853,True,2004,69,0.926,0.916,10,-2.221,0,0.0929,0.35,6.17e-06,0.0234,0.904,110.007,"pop, R&B" +Pretty Ricky,Your Body,240040,True,2005,59,0.838,0.594,4,-5.695,0,0.113,0.176,0.0,0.0867,0.507,90.038,"hip hop, pop, R&B" +Thirty Seconds To Mars,The Kill,231533,False,2005,74,0.179,0.912,4,-3.881,0,0.0791,0.0014,0.000294,0.582,0.289,182.99,rock +Alicia Keys,Karma,256000,False,2003,50,0.727,0.736,10,-6.203,1,0.0615,0.0743,0.00311,0.0348,0.687,85.098,"pop, R&B" +2Pac,Ghetto Gospel,238053,True,2004,68,0.794,0.614,5,-5.352,0,0.0467,0.0964,0.0,0.0788,0.663,80.569,hip hop +Craig David,Don't Love You No More (I'm Sorry),243986,False,2005,56,0.636,0.558,7,-7.076,1,0.0245,0.462,0.0,0.062,0.572,100.113,"hip hop, pop, R&B" +Katie Melua,Nine Million Bicycles,197160,False,2005,60,0.534,0.247,0,-15.636,1,0.0322,0.516,0.00859,0.122,0.344,82.168,"pop, easy listening, jazz" +Lil Jon & The East Side Boyz,Lovers And Friends,260600,True,2004,66,0.675,0.501,10,-6.183,1,0.22,0.0541,0.0,0.429,0.532,139.861,"hip hop, pop" +Mariah Carey,Don't Forget About Us - Radio 
Edit,233866,False,2005,0,0.7,0.529,10,-6.816,0,0.0395,0.0422,0.0,0.101,0.386,143.555,"pop, R&B" +Ying Yang Twins,Wait (The Whisper Song),179160,True,2005,57,0.933,0.513,7,-13.203,1,0.347,0.00112,0.00248,0.107,0.595,102.017,"hip hop, pop" +Will Smith,Switch,197666,False,2020,34,0.873,0.9,5,-4.325,0,0.18,0.0107,0.000502,0.556,0.478,102.516,"hip hop, pop" +Bow Wow,Like You (feat. Ciara),205840,False,2005,66,0.714,0.6,6,-8.365,1,0.0876,0.0465,0.0,0.0646,0.504,82.017,"hip hop, pop, R&B" +Kaiser Chiefs,Everyday I Love You Less And Less,217706,False,2005,58,0.49,0.956,1,-4.556,0,0.0407,0.00466,6.15e-06,0.242,0.577,160.028,rock +Kelly Clarkson,Behind These Hazel Eyes,198973,False,2004,59,0.548,0.889,9,-4.682,1,0.0382,0.00165,0.00109,0.197,0.425,90.048,"pop, R&B" +Black Eyed Peas,Don't Lie,219000,False,2005,61,0.662,0.785,9,-5.299,1,0.186,0.16,0.0,0.0784,0.604,89.885,"hip hop, pop" +Westlife,You Raise Me Up,241066,False,2005,1,0.129,0.358,10,-6.596,1,0.0314,0.631,0.0,0.0886,0.172,75.048,pop +Ray J,One Wish,337640,False,2005,62,0.525,0.652,5,-7.042,0,0.28,0.278,0.0,0.339,0.602,127.673,"hip hop, pop, R&B" +Papa Roach,Scars,208199,False,2004,60,0.455,0.929,11,-3.295,1,0.0494,0.0875,0.0,0.226,0.484,89.782,"rock, metal" +Arctic Monkeys,I Bet You Look Good On The Dancefloor,173680,False,2006,71,0.535,0.948,6,-4.19,0,0.0356,0.00225,0.0,0.376,0.778,103.183,rock +Shayne Ward,That's My Goal,219600,False,2006,1,0.286,0.508,2,-5.966,1,0.0336,0.372,0.0,0.27,0.14,68.637,pop +Bow Wow,Let Me Hold You (feat. 
Omarion),248493,False,2005,62,0.815,0.662,6,-8.244,1,0.199,0.0915,0.000409,0.0898,0.611,151.518,"hip hop, pop, R&B" +Kaiser Chiefs,I Predict A Riot,233186,False,2005,63,0.395,0.979,8,-3.241,1,0.0766,0.0243,0.0,0.12,0.41,158.842,rock +Usher,Caught Up,224640,False,2004,61,0.817,0.806,6,-4.606,1,0.0623,0.027,9e-06,0.119,0.805,110.086,"hip hop, pop, R&B" +Kelly Clarkson,Because of You,219493,False,2004,71,0.587,0.583,5,-5.284,0,0.0313,0.248,0.0,0.124,0.15,139.92,"pop, R&B" +3 Doors Down,Let Me Go,243053,False,2012,52,0.478,0.863,1,-4.914,0,0.0387,0.0181,0.0,0.111,0.491,92.004,"pop, rock, metal" +Nickelback,Photograph,258920,False,2005,67,0.515,0.876,8,-3.756,1,0.0292,0.000932,0.000166,0.136,0.385,145.916,"rock, metal" +T-Pain,I'm Sprung,231040,False,2005,0,0.722,0.329,0,-11.617,0,0.108,0.088,0.0,0.081,0.166,99.991,"hip hop, pop, R&B" +Destiny's Child,Cater 2 U,245400,False,2004,58,0.605,0.584,11,-7.043,0,0.197,0.286,0.0,0.235,0.464,125.802,"pop, R&B" +The Pussycat Dolls,Stickwitu,207506,False,2005,67,0.548,0.554,9,-6.408,1,0.0587,0.283,0.0,0.0708,0.382,79.918,"pop, R&B" +Ryan Cabrera,True,204173,False,2004,62,0.598,0.4,6,-10.054,1,0.0241,0.459,0.0,0.151,0.369,96.938,pop +Stereophonics,Dakota,297426,False,2005,69,0.506,0.93,4,-3.499,1,0.0675,0.129,0.00682,0.0931,0.305,146.994,"rock, pop" +Eminem,Like Toy Soldiers,296880,True,2004,67,0.52,0.768,8,-3.489,0,0.359,0.0193,0.00034,0.104,0.398,79.178,hip hop +Nelly,Over And Over,253933,True,2004,61,0.644,0.517,7,-10.02,1,0.0803,0.0065,1.37e-06,0.158,0.464,169.799,"hip hop, pop, R&B" +Coldplay,Speed of Sound,287906,False,2005,69,0.514,0.898,11,-6.765,0,0.0577,0.00488,0.0345,0.0746,0.353,123.067,"rock, pop" +KT Tunstall,Suddenly I See,201706,False,2005,71,0.587,0.767,0,-5.713,1,0.0449,0.225,0.0,0.112,0.664,100.38,"World/Traditional, pop, Folk/Acoustic" +Destiny's Child,Girl,224146,False,2004,55,0.567,0.747,6,-6.019,0,0.105,0.31,0.0,0.04,0.556,89.036,"pop, R&B" +Kelly Clarkson,Since U Been 
Gone,188960,False,2004,71,0.662,0.739,9,-5.354,0,0.0322,0.00206,0.0603,0.113,0.382,130.999,"pop, R&B" +U2,Sometimes You Can't Make It On Your Own,305080,False,2004,56,0.53,0.597,2,-5.652,1,0.0247,0.00275,0.473,0.105,0.346,96.0,"World/Traditional, rock" +The All-American Rejects,Dirty Little Secret,193653,False,2005,65,0.469,0.955,10,-4.253,1,0.0432,0.000343,1.35e-06,0.548,0.462,143.853,"rock, pop" +Toby Keith,As Good As I Once Was,227840,False,2005,67,0.68,0.87,5,-4.461,1,0.0393,0.266,0.0,0.109,0.646,119.075,country +Gwen Stefani,Cool,189333,False,2004,59,0.709,0.721,2,-3.52,1,0.0281,0.0322,0.000309,0.355,0.801,112.019,"pop, R&B" +Foo Fighters,Best of You,255626,False,2005,76,0.366,0.94,1,-5.119,0,0.0696,0.000769,9.42e-05,0.188,0.369,130.198,"rock, metal" +Shakira,Hips Don't Lie (feat. Wyclef Jean),218093,False,2005,82,0.778,0.824,10,-5.892,0,0.0707,0.284,0.0,0.405,0.758,100.024,"pop, latin" +Sean Paul,Temperature,218573,False,2005,78,0.951,0.6,0,-4.675,0,0.0685,0.106,0.0,0.0712,0.822,125.04,"hip hop, pop" +Justin Timberlake,SexyBack (feat. 
Timbaland),242733,True,2006,78,0.967,0.583,7,-5.562,0,0.0789,0.0584,0.0,0.0519,0.964,117.0,pop +Beyoncé,Irreplaceable,227853,False,2007,70,0.447,0.694,7,-4.637,0,0.382,0.0293,5.46e-06,0.167,0.509,175.868,"pop, R&B" +Amy Winehouse,Rehab,214946,False,2006,71,0.407,0.558,0,-13.609,1,0.0552,0.0541,1.51e-06,0.34,0.777,71.815,R&B +Rihanna,SOS,238920,False,2006,73,0.677,0.671,7,-4.905,1,0.0361,0.004,5.07e-05,0.413,0.527,137.046,"hip hop, pop, R&B" +Gnarls Barkley,Crazy,177466,False,2006,74,0.855,0.587,8,-4.589,1,0.0336,0.0505,0.00714,0.105,0.64,111.97,"hip hop, R&B" +Busta Rhymes,Touch It,214960,True,2006,61,0.658,0.551,1,-6.62,1,0.37,0.0043,0.000108,0.197,0.442,138.335,"hip hop, pop" +Nelly Furtado,Maneater,258893,False,2006,79,0.796,0.777,6,-4.81,1,0.0397,0.0261,0.000358,0.121,0.787,132.722,"hip hop, pop, latin" +Bob Sinclar,Rock This Party - Everybody Dance Now,245466,False,2006,69,0.893,0.823,5,-3.708,0,0.122,0.0714,0.0,0.0509,0.902,128.009,"pop, Dance/Electronic" +Diddy,Come to Me (feat. Nicole Scherzinger),276786,True,2006,55,0.819,0.485,6,-6.45,0,0.0756,0.0176,0.0,0.658,0.386,96.998,"hip hop, pop" +Madonna,Sorry,281880,False,2005,61,0.589,0.877,0,-6.229,0,0.046,0.00121,0.013,0.169,0.443,132.971,pop +Chamillionaire,Ridin',303053,True,2005,71,0.787,0.799,8,-4.68,0,0.0993,0.189,0.0,0.3,0.835,143.052,"hip hop, pop" +Black Eyed Peas,Pump It,213066,False,2005,75,0.648,0.931,1,-3.15,0,0.181,0.00937,0.0,0.752,0.744,153.649,"hip hop, pop" +Nelly Furtado,Promiscuous,242293,False,2006,82,0.808,0.97,10,-6.098,0,0.0506,0.0569,6.13e-05,0.154,0.868,114.328,"hip hop, pop, latin" +Fedde Le Grand,Put Your Hands Up for Detroit - Radio Edit,150533,False,2015,39,0.814,0.923,7,-4.525,1,0.229,0.0154,0.154,0.0862,0.518,127.919,"pop, Dance/Electronic" +Sean Paul,(When You Gonna) Give It Up to Me (feat. 
Keyshia Cole) - Radio Version,243880,False,2006,58,0.711,0.761,8,-3.04,1,0.225,0.067,0.0,0.041,0.718,95.824,"hip hop, pop" +The Pussycat Dolls,Buttons,225560,False,2005,68,0.565,0.817,2,-4.338,1,0.27,0.152,0.0,0.247,0.477,210.851,"pop, R&B" +Ciara,Get Up (feat. Chamillionaire),261880,False,2006,59,0.964,0.595,1,-6.887,0,0.109,0.0248,5.14e-06,0.0405,0.629,128.593,"pop, R&B" +Eminem,Shake That,274440,True,2005,74,0.963,0.643,1,-5.785,0,0.117,0.0507,4.94e-05,0.157,0.534,107.005,hip hop +Cherish,Do It To It,226293,False,2006,65,0.826,0.67,2,-5.559,1,0.169,0.00798,0.0,0.495,0.794,74.007,"hip hop, pop, R&B" +Muse,Supermassive Black Hole,212439,False,2006,75,0.668,0.921,7,-3.727,1,0.0439,0.0492,0.00517,0.0877,0.782,120.0,rock +Chris Brown,Gimme That,186826,False,2005,63,0.678,0.572,11,-7.033,0,0.0466,0.00374,6.59e-05,0.118,0.444,87.165,"hip hop, pop, R&B" +Infernal,From Paris to Berlin,209666,False,2004,57,0.741,0.869,9,-6.534,0,0.0816,0.096,0.0,0.0396,0.839,126.069,pop +Cassie,Me & U,192213,False,2006,73,0.803,0.454,8,-4.802,0,0.0294,0.352,0.0,0.0655,0.739,99.99,"pop, R&B" +E-40,U and Dat (feat. T. Pain & Kandi Girl),202720,True,2006,63,0.858,0.484,6,-7.448,0,0.277,0.0241,1.8e-06,0.123,0.329,99.992,"hip hop, pop" +The Pussycat Dolls,Beep,229360,False,2005,57,0.938,0.735,7,-6.382,1,0.0434,0.00952,0.0,0.0998,0.55,103.7,"pop, R&B" +Lil Jon,Snap Yo Fingers,274386,False,2006,67,0.835,0.757,8,-5.029,0,0.23,0.00647,0.0,0.29,0.626,82.038,"hip hop, pop" +Armand Van Helden,My My My - Radio Edit,190000,False,2006,40,0.678,0.768,7,-8.502,1,0.137,0.0195,0.00866,0.0322,0.758,127.51,"pop, Dance/Electronic" +The Notorious B.I.G.,"Nasty Girl (feat. Diddy, Nelly, Jagged Edge & Avery Storm) - 2005 Remaster",286186,True,2005,68,0.833,0.628,2,-7.041,1,0.141,0.0949,1.6e-06,0.282,0.645,106.328,hip hop +Yung Joc,It's Goin' Down (feat. 
Nitti),241840,True,2006,66,0.888,0.577,1,-7.702,0,0.0612,0.0986,0.0,0.131,0.609,84.003,"hip hop, pop" +Shayne Ward,No Promises,223066,False,2006,1,0.5,0.498,4,-6.087,1,0.026,0.527,0.0,0.123,0.182,79.798,pop +Justin Timberlake,My Love (feat. T.I.),276160,False,2006,72,0.771,0.68,11,-5.881,0,0.224,0.277,1.18e-05,0.682,0.808,119.952,pop +Nickelback,Savin' Me,219320,False,2005,65,0.441,0.815,3,-4.088,1,0.0276,0.000731,0.0,0.414,0.522,164.007,"rock, metal" +Nelly,Grillz,271160,False,2005,60,0.867,0.504,4,-7.737,0,0.241,0.0358,0.0,0.307,0.84,82.996,"hip hop, pop, R&B" +Take That,Patience,202066,False,2006,64,0.309,0.783,10,-4.154,1,0.0405,0.142,1.11e-06,0.118,0.372,175.788,pop +Fergie,London Bridge,241306,True,2006,57,0.769,0.609,1,-5.894,1,0.357,0.216,0.0,0.166,0.633,90.951,"pop, R&B" +Cascada,Everytime We Touch - Radio Edit,199120,False,2011,0,0.64,0.977,8,-5.369,1,0.0555,0.00461,1.68e-05,0.371,0.493,142.019,"hip hop, pop, Dance/Electronic" +D4L,Laffy Taffy,224253,True,2005,63,0.891,0.439,7,-7.994,0,0.428,0.0351,0.0,0.0932,0.622,77.499,"hip hop, pop" +LL Cool J,Control Myself,233973,False,2006,42,0.825,0.876,10,-6.556,0,0.211,0.00928,1.2e-05,0.205,0.48,120.066,"hip hop, pop, R&B" +Beyoncé,Deja Vu (feat. Jay-Z),240280,False,2007,57,0.644,0.746,7,-4.941,1,0.341,0.0071,1.22e-05,0.0743,0.355,105.253,"pop, R&B" +P!nk,Stupid Girls,197173,True,2006,0,0.683,0.886,4,-5.045,0,0.071,0.000738,0.00188,0.0524,0.566,100.04,pop +Eminem,When I'm Gone,281320,True,2005,68,0.618,0.746,10,-5.476,0,0.352,0.0553,0.0,0.273,0.75,75.272,hip hop +Kelis,Bossy,273973,False,2006,50,0.906,0.633,4,-5.316,1,0.19,0.000182,8.34e-06,0.058,0.779,84.021,"pop, R&B, Dance/Electronic" +Bob Sinclar,Love Generation - Radio Edit,207613,False,2005,49,0.715,0.812,1,-5.758,1,0.062,0.175,0.000134,0.044,0.554,128.04,"pop, Dance/Electronic" +Meck,Thunder in My Heart Again (Radio Edit),189800,False,2005,46,0.679,0.922,2,-4.67,0,0.0294,0.000241,0.69,0.0697,0.806,129.016,set() +Mary J. 
Blige,Be Without You - Kendu Mix,246333,True,2005,69,0.726,0.7,2,-5.881,0,0.0998,0.0717,0.0,0.262,0.673,146.559,"pop, R&B" +Girls Aloud,Something Kinda Ooooh,201590,False,2012,49,0.681,0.908,11,-3.918,0,0.0473,0.0232,0.0,0.355,0.879,131.925,"pop, Dance/Electronic" +Rihanna,Unfaithful,226973,False,2006,70,0.588,0.391,0,-8.607,0,0.0334,0.839,0.0,0.227,0.349,144.069,"hip hop, pop, R&B" +Kanye West,Touch The Sky,236600,True,2005,73,0.552,0.846,9,-4.912,1,0.342,0.0114,0.0,0.309,0.554,98.56,hip hop +Dem Franchize Boyz,"Lean Wit It, Rock Wit It",229813,True,2006,60,0.886,0.62,5,-5.854,1,0.307,0.082,0.0,0.1,0.6,76.035,"hip hop, pop" +Evanescence,Call Me When You're Sober,214706,False,2006,65,0.45,0.883,7,-4.094,1,0.0524,0.00193,0.0,0.293,0.328,93.41,metal +Chingy,Pullin' Me Back,234133,True,2006,62,0.786,0.511,7,-6.66,1,0.286,0.0825,0.0,0.12,0.587,80.0,"hip hop, pop, R&B" +Nickelback,Far Away,238173,False,2005,70,0.518,0.797,6,-5.153,1,0.0309,0.000681,0.0,0.107,0.293,132.918,"rock, metal" +Fall Out Boy,"Dance, Dance",180266,False,2005,74,0.622,0.961,11,-3.198,0,0.154,0.00523,0.0,0.0854,0.449,114.452,rock +Leona Lewis,A Moment Like This,257293,False,2007,57,0.261,0.562,8,-3.667,1,0.0301,0.423,7.54e-06,0.131,0.167,70.543,"pop, R&B" +T.I.,What You Know,274333,True,2006,64,0.551,0.81,11,-4.365,1,0.0344,0.0155,0.0,0.169,0.332,73.464,"hip hop, pop" +Ludacris,Money Maker,230613,True,2006,62,0.551,0.598,1,-6.79,1,0.27,0.15,0.0,0.299,0.612,78.756,"hip hop, pop" +Nick Lachey,What's Left Of Me,244613,False,2006,37,0.42,0.742,7,-5.665,1,0.0375,0.142,0.0,0.145,0.294,149.965,pop +Jibbs,Chain Hang Low,207586,True,2006,56,0.792,0.589,4,-6.869,0,0.262,0.0259,0.000128,0.114,0.778,157.147,"hip hop, pop" +Arctic Monkeys,When The Sun Goes Down,202133,True,2006,72,0.348,0.875,11,-4.758,0,0.199,0.0341,0.0,0.117,0.407,169.152,rock +Rascal Flatts,Life is a Highway,276773,False,2006,0,0.561,0.936,5,-5.409,1,0.0613,0.00185,0.0,0.102,0.594,103.055,country +Beyoncé,Check On It (feat. 
Slim Thug),211186,False,2006,45,0.7,0.887,7,-3.887,1,0.218,0.0805,0.0,0.376,0.883,166.104,"pop, R&B" +Keyshia Cole,Love,255333,False,2005,72,0.688,0.519,5,-4.285,1,0.0283,0.064,0.0,0.1,0.318,116.714,"hip hop, pop, R&B" +Lily Allen,Smile,196893,True,2006,68,0.632,0.639,5,-5.938,1,0.0259,0.00143,0.0,0.221,0.733,95.506,pop +Ne-Yo,Sexy Love,220853,False,2006,67,0.693,0.516,8,-6.446,1,0.0413,0.297,0.000127,0.0604,0.494,94.02,"pop, R&B" +JoJo,Too Little Too Late,223680,False,2006,48,0.535,0.823,6,-5.338,0,0.0431,0.0113,3.88e-06,0.146,0.685,83.001,"hip hop, pop, R&B" +Mary J. Blige,One,260466,False,2005,64,0.344,0.874,0,-3.161,1,0.0777,0.0841,0.0,0.129,0.269,185.888,"pop, R&B" +Bubba Sparxxx,Ms. New Booty,252653,True,2005,62,0.64,0.62,1,-5.931,1,0.416,0.016,0.0,0.0831,0.609,129.37,"hip hop, pop, country" +Amy Winehouse,You Know I'm No Good,256946,False,2006,63,0.705,0.806,9,-3.607,1,0.0309,0.0132,0.00428,0.0701,0.734,103.383,R&B +Rascal Flatts,What Hurts The Most,214106,False,2006,69,0.537,0.674,5,-5.134,0,0.0277,0.0088,0.0,0.265,0.33,136.002,country +Red Hot Chili Peppers,Dani California,282160,False,2006,76,0.556,0.913,0,-2.36,1,0.0437,0.0193,8.59e-06,0.346,0.73,96.184,rock +Scissor Sisters,I Don't Feel Like Dancin',288360,False,2006,63,0.707,0.923,7,-3.409,1,0.0276,0.0195,0.0,0.342,0.845,108.023,"pop, rock, Dance/Electronic" +Ne-Yo,So Sick,207186,False,2006,75,0.452,0.574,6,-8.336,1,0.31,0.246,0.0,0.189,0.58,92.791,"pop, R&B" +Hinder,Lips Of An Angel,261053,False,2019,35,0.474,0.744,2,-5.386,1,0.0341,0.0208,1.22e-06,0.209,0.238,129.005,"pop, rock, metal" +The Raconteurs,"Steady, As She Goes",215266,False,2006,56,0.525,0.578,9,-4.563,1,0.12,0.0132,0.00916,0.108,0.537,123.696,rock +The Fratellis,Chelsea Dagger,215306,False,2006,68,0.511,0.815,7,-3.14,1,0.144,0.0491,0.0,0.0826,0.586,154.514,rock +Fort Minor,Where'd You Go (feat. 
Holly Brook & Jonah Matranga),231866,True,2005,42,0.684,0.819,1,-3.309,0,0.238,0.262,0.00197,0.113,0.25,179.999,"hip hop, pop, rock" +The Pussycat Dolls,Stickwitu,207506,False,2005,67,0.548,0.554,9,-6.408,1,0.0587,0.283,0.0,0.0708,0.382,79.918,"pop, R&B" +Panic! At The Disco,I Write Sins Not Tragedies,185586,False,2005,70,0.566,0.815,9,-4.481,0,0.14,0.0737,0.0,0.12,0.672,169.961,rock +James Blunt,Goodbye My Lover,258653,False,2005,67,0.432,0.261,4,-12.591,1,0.0512,0.953,0.00342,0.0903,0.273,89.567,pop +Christina Aguilera,Ain't No Other Man,228906,False,2006,63,0.862,0.742,7,-4.722,1,0.222,0.00376,0.00855,0.103,0.511,127.91,pop +Westlife,The Rose,219106,False,2006,0,0.272,0.203,9,-9.706,1,0.0294,0.784,0.0,0.0805,0.172,109.581,pop +Keane,Is It Any Wonder?,186173,False,2006,59,0.489,0.955,11,-2.771,0,0.0363,3.82e-05,0.000178,0.334,0.816,129.352,pop +Orson,No Tomorrow,167493,False,2006,46,0.656,0.624,3,-4.623,0,0.0691,0.0065,0.0,0.278,0.735,124.082,pop +T-Pain,I'm N Luv (Wit a Stripper) (feat. 
Mike Jones),265333,True,2005,0,0.731,0.368,8,-10.38,1,0.0688,0.00544,0.0,0.193,0.512,145.171,"hip hop, pop, R&B" +KT Tunstall,Black Horse And The Cherry Tree,172373,False,2005,64,0.748,0.786,4,-7.788,0,0.0641,0.328,0.0,0.34,0.917,104.833,"World/Traditional, pop, Folk/Acoustic" +Natasha Bedingfield,Unwritten,259333,False,2004,2,0.706,0.8,5,-6.333,1,0.0399,0.00584,0.0,0.0822,0.629,100.011,pop +Razorlight,America,249760,False,2006,67,0.447,0.568,2,-4.975,1,0.0305,0.295,3.26e-06,0.355,0.149,90.933,"rock, pop" +James Morrison,You Give Me Something,213173,False,2006,64,0.559,0.688,7,-6.496,1,0.114,0.216,0.0,0.0855,0.578,79.753,"pop, R&B" +Snow Patrol,You're All I Have,273333,False,2006,35,0.467,0.921,2,-4.554,1,0.045,5.48e-05,0.0566,0.176,0.501,132.013,"World/Traditional, rock, pop" +The Fray,How to Save a Life,262533,False,2005,79,0.64,0.743,10,-4.08,1,0.0379,0.269,0.0,0.101,0.361,122.035,pop +Sandi Thom,I Wish I Was a Punk Rocker (with Flowers in My Hair),151640,False,2006,61,0.7,0.465,4,-6.815,1,0.358,0.544,0.0,0.606,0.719,108.102,"World/Traditional, pop" +My Chemical Romance,Welcome to the Black Parade,311106,False,2006,76,0.217,0.905,2,-4.103,1,0.0752,0.000289,0.00011,0.222,0.236,96.95,rock +Razorlight,In The Morning,222453,False,2006,59,0.616,0.855,4,-3.495,0,0.042,0.00379,0.000863,0.318,0.686,124.191,"rock, pop" +The All-American Rejects,Dirty Little Secret,193653,False,2005,65,0.469,0.955,10,-4.253,1,0.0432,0.000343,1.35e-06,0.548,0.462,143.853,"rock, pop" +James Morrison,Wonderful World,210066,False,2006,49,0.474,0.71,1,-4.606,1,0.029,0.0548,0.0,0.0974,0.591,78.969,"pop, R&B" +Chris Brown,Yo (Excuse Me Miss),229040,False,2005,69,0.536,0.612,4,-5.847,1,0.272,0.119,0.0,0.209,0.57,86.768,"hip hop, pop, R&B" +P!nk,Who Knew,208493,False,2006,0,0.688,0.734,9,-4.569,1,0.0274,0.00462,0.0,0.0756,0.46,140.004,pop +The Kooks,Naive,203506,False,2006,73,0.391,0.808,8,-6.209,0,0.0892,0.0759,0.0,0.149,0.737,100.304,rock +Leona Lewis,Bleeding 
Love,262466,False,2007,75,0.638,0.656,5,-5.886,1,0.0357,0.188,0.0,0.146,0.225,104.036,"pop, R&B" +Rihanna,Umbrella,275986,False,2008,81,0.583,0.829,1,-4.603,1,0.134,0.00864,0.0,0.0426,0.575,174.028,"hip hop, pop, R&B" +Britney Spears,Gimme More,251240,False,2007,79,0.788,0.844,2,-3.131,1,0.0334,0.25,0.000678,0.0723,0.382,113.324,pop +Fergie,Big Girls Don't Cry (Personal),268120,False,2006,76,0.708,0.641,7,-4.296,1,0.0335,0.205,0.0,0.0945,0.253,113.082,"pop, R&B" +Mark Ronson,Valerie (feat. Amy Winehouse) - Version Revisited,219413,False,2007,0,0.698,0.844,1,-4.789,1,0.0544,0.00253,0.000426,0.124,0.896,105.828,pop +Plain White T's,Hey There Delilah,232533,False,2005,78,0.656,0.291,2,-10.572,1,0.0293,0.872,0.0,0.114,0.298,103.971,pop +Justin Timberlake,What Goes Around.../...Comes Around (Interlude),448573,False,2006,70,0.687,0.723,7,-4.751,1,0.0709,0.122,0.00102,0.573,0.432,76.0,pop +Gwen Stefani,The Sweet Escape,246466,False,2006,71,0.756,0.77,1,-3.502,1,0.0343,0.191,0.0,0.178,0.73,119.961,"pop, R&B" +Timbaland,The Way I Are,179120,False,2007,80,0.731,0.807,3,-6.492,0,0.1,0.181,0.751,0.319,0.765,114.759,"hip hop, pop, R&B" +Nelly Furtado,Say It Right,223080,False,2006,81,0.872,0.872,1,-6.328,1,0.139,0.0476,0.00112,0.0543,0.809,116.948,"hip hop, pop, latin" +Ida Corr,Let Me Think About It,151973,False,2012,43,0.762,0.754,0,-3.425,0,0.046,0.00022,0.0665,0.146,0.715,129.026,set() +Alex Gaudino,Destination Calabria - Radio Edit,223111,False,2007,73,0.627,0.954,2,-4.804,1,0.0425,0.0015,0.000814,0.292,0.312,128.016,Dance/Electronic +September,Cry for You,209800,False,2007,65,0.767,0.881,9,-3.988,1,0.0301,0.00133,0.000139,0.0551,0.961,130.018,"pop, Dance/Electronic" +Akon,Smack That,212360,True,2006,58,0.94,0.743,5,-5.166,0,0.0475,0.317,0.0,0.0909,0.932,118.988,pop +Timbaland,Give It To Me,234026,True,2007,70,0.975,0.711,8,-3.904,1,0.0632,0.168,0.000552,0.0799,0.815,110.621,"hip hop, pop, R&B" +50 Cent,Ayo 
Technology,247946,True,2007,67,0.63,0.782,8,-5.44,0,0.132,0.0828,0.0,0.034,0.418,140.144,"hip hop, pop" +Eric Prydz,Proper Education - Radio Edit,198567,False,2007,0,0.537,0.937,0,-4.543,1,0.0523,0.00102,0.035,0.0917,0.324,124.938,"pop, Dance/Electronic" +Fedde Le Grand,Put Your Hands Up For Detroit - Radio Edit,150533,False,2015,0,0.827,0.931,7,-4.474,1,0.202,0.0153,0.21,0.0992,0.491,127.995,"pop, Dance/Electronic" +Ciara,Like a Boy,237053,False,2006,68,0.701,0.724,0,-5.751,0,0.145,0.267,0.0,0.0867,0.425,132.035,"pop, R&B" +Kanye West,Stronger,311866,True,2007,54,0.617,0.717,10,-7.858,0,0.153,0.00564,0.0,0.408,0.49,103.992,hip hop +Beyoncé,Beautiful Liar,199853,False,2007,64,0.764,0.751,8,-3.74,1,0.0703,0.00554,0.000237,0.164,0.418,91.977,"pop, R&B" +Gym Class Heroes,Cupid's Chokehold / Breakfast in America - Radio Mix,237173,False,2007,59,0.584,0.681,1,-5.084,0,0.0781,0.495,0.0,0.289,0.821,79.702,"hip hop, pop" +Akon,I Wanna Love You,247066,True,2006,53,0.865,0.45,0,-9.387,1,0.0417,0.0359,1.28e-06,0.308,0.352,99.989,pop +Soulja Boy,Crank That (Soulja Boy),221933,False,2007,74,0.736,0.74,0,-2.18,1,0.0786,0.515,0.0,0.0468,0.803,140.141,"hip hop, pop" +Fat Joe,Make It Rain,247413,True,2006,59,0.744,0.697,11,-5.063,0,0.197,0.0106,0.0,0.0842,0.767,149.28,"hip hop, pop" +P!nk,U + Ur Hand,214386,True,2006,0,0.709,0.891,0,-3.688,1,0.0528,0.00144,0.0,0.034,0.886,141.04,pop +Rihanna,Shut Up And Drive,212280,False,2008,69,0.735,0.826,1,-4.902,1,0.0349,0.00101,0.129,0.197,0.74,132.964,"hip hop, pop, R&B" +Lloyd,Get It Shawty,209533,False,2007,62,0.822,0.905,5,-4.032,0,0.241,0.123,0.0,0.0496,0.674,128.014,"hip hop, pop, R&B" +Kanye West,Can't Tell Me Nothing,271600,True,2007,47,0.596,0.62,5,-6.133,0,0.039,0.0122,0.0,0.82,0.102,80.029,hip hop +Camille Jones,The Creeps - Fedde Le Grand Radio Mix,152333,False,2006,17,0.847,0.861,6,-6.632,1,0.0603,0.278,0.029,0.0626,0.724,127.894,set() +Linkin Park,What I've 
Done,205613,False,2007,77,0.623,0.93,5,-5.285,1,0.0324,0.0141,1.64e-06,0.138,0.287,120.119,"rock, metal" +Keyshia Cole,Last Night,255706,False,2007,60,0.918,0.857,3,-5.032,0,0.0623,0.166,0.0003,0.0855,0.972,121.006,"hip hop, pop, R&B" +Chris Brown,Kiss Kiss (feat. T-Pain),250666,False,2007,68,0.729,0.658,10,-3.386,0,0.225,0.0506,0.0,0.0693,0.551,140.043,"hip hop, pop, R&B" +Shop Boyz,Party Like A Rock Star,253400,True,2007,61,0.619,0.709,7,-4.633,1,0.0395,0.00541,0.0,0.507,0.68,144.972,"hip hop, pop" +Finger Eleven,Paralyzer,208106,False,2007,73,0.644,0.939,11,-3.486,0,0.0456,0.157,0.0,0.233,0.861,106.031,"rock, metal" +Justin Timberlake,Summer Love,252973,False,2006,66,0.606,0.591,0,-7.228,0,0.087,0.0756,0.0,0.328,0.477,95.802,pop +MiMS,This Is Why I'm Hot,253706,True,2007,59,0.621,0.6,8,-6.349,1,0.576,0.277,0.0,0.495,0.485,80.021,"hip hop, pop" +Fabolous,Make Me Better,253573,True,2007,62,0.607,0.599,11,-6.886,1,0.0995,0.33,0.0,0.199,0.562,86.491,"hip hop, pop, R&B" +Seether,Fake It,193893,True,2007,73,0.611,0.95,8,-3.509,1,0.0518,0.00141,1.59e-06,0.0543,0.607,132.078,"rock, metal" +Take That,Patience,202066,False,2006,64,0.309,0.783,10,-4.154,1,0.0405,0.142,1.11e-06,0.118,0.372,175.788,pop +Arctic Monkeys,Brianstorm,172866,False,2007,67,0.42,0.974,1,-4.706,1,0.191,8.15e-05,0.00155,0.0871,0.463,165.182,rock +Justin Timberlake,My Love (feat. 
T.I.),276160,False,2006,72,0.771,0.68,11,-5.881,0,0.224,0.277,1.18e-05,0.682,0.808,119.952,pop +Fergie,Glamorous,246600,True,2006,64,0.811,0.757,0,-6.447,1,0.23,0.298,0.0,0.103,0.565,130.993,"pop, R&B" +Linkin Park,Bleed It Out,166373,True,2007,73,0.529,0.967,1,-3.647,0,0.0508,0.114,0.0,0.61,0.596,140.127,"rock, metal" +My Chemical Romance,Teenagers,161920,False,2006,80,0.463,0.857,4,-3.063,1,0.0632,0.0506,0.0,0.184,0.856,111.647,rock +Calvin Harris,Acceptable in the 80's,333680,False,2007,57,0.787,0.808,7,-5.454,1,0.0511,0.0143,0.257,0.0466,0.942,127.99,"hip hop, pop, Dance/Electronic" +Sugababes,About You Now,212400,False,2007,66,0.584,0.699,1,-6.065,0,0.0378,6.52e-05,6.05e-05,0.486,0.585,82.499,"pop, R&B" +Avril Lavigne,Girlfriend,216600,True,2007,75,0.56,0.959,7,-2.433,1,0.102,0.000722,0.000221,0.209,0.669,163.983,pop +Ne-Yo,Because Of You,266840,False,2007,74,0.81,0.538,0,-5.784,0,0.0356,0.528,0.0,0.0951,0.828,109.97,"pop, R&B" +Daughtry,It's Not Over,215173,False,2016,45,0.45,0.921,10,-3.476,0,0.0538,0.0467,0.0,0.311,0.413,145.959,"pop, rock, metal" +Unk,Walk It Out,173040,True,2013,48,0.794,0.777,1,-3.696,0,0.0813,0.0982,0.0,0.202,0.706,160.022,"hip hop, pop" +Lloyd,You,273133,False,2007,70,0.412,0.876,5,-6.008,0,0.47,0.102,0.0,0.297,0.703,130.159,"hip hop, pop, R&B" +Klaxons,Golden Skans,165120,False,2007,65,0.463,0.836,2,-2.776,0,0.0381,0.000418,7.38e-06,0.25,0.713,141.955,rock +T-Pain,Bartender (feat. 
Akon),238800,True,2007,3,0.675,0.394,8,-8.539,1,0.133,0.0611,0.0,0.214,0.405,104.987,"hip hop, pop, R&B" +Baby Boy Da Prince,The Way I Live - Main Explicit,324906,True,2006,59,0.88,0.438,0,-7.562,1,0.248,0.0147,0.0,0.0907,0.714,90.02,"hip hop, pop" +Fergie,Fergalicious,292373,True,2006,63,0.906,0.584,8,-7.72,0,0.316,0.0576,0.0,0.128,0.831,129.055,"pop, R&B" +Omarion,Ice Box,256426,False,2006,57,0.775,0.731,8,-5.446,1,0.134,0.189,0.0,0.129,0.821,131.105,"hip hop, pop, R&B" +Pretty Ricky,On the Hotline - Amended Version,242586,False,2007,57,0.704,0.854,10,-5.477,0,0.183,0.0185,0.0,0.148,0.688,92.988,"hip hop, pop, R&B" +Nickelback,Rockstar,252040,False,2005,69,0.616,0.91,0,-3.004,1,0.0386,0.0459,0.0,0.343,0.693,144.072,"rock, metal" +Fall Out Boy,Thnks fr th Mmrs,203506,False,2007,76,0.459,0.891,10,-5.057,0,0.0623,0.00511,0.0,0.106,0.588,154.837,rock +Justin Timberlake,LoveStoned / I Think She Knows (Interlude),444333,True,2006,57,0.859,0.681,1,-6.247,1,0.0549,0.34,0.000161,0.213,0.852,121.243,pop +All Time Low,"Dear Maria, Count Me In",182826,False,2007,76,0.459,0.895,2,-3.126,1,0.0805,0.00725,0.0,0.206,0.572,181.04,rock +Amy Winehouse,Back To Black,241293,True,2006,76,0.403,0.422,7,-13.964,0,0.0373,0.134,2.05e-05,0.0861,0.378,122.728,R&B +Keyshia Cole,Let It Go,238360,False,2007,58,0.808,0.721,1,-5.165,1,0.213,0.197,0.0,0.205,0.773,94.955,"hip hop, pop, R&B" +Ciara,Promise,267413,False,2006,56,0.697,0.629,9,-5.757,1,0.0425,0.409,3.8e-06,0.0819,0.439,123.279,"pop, R&B" +Enrique Iglesias,Do You Know? 
(The Ping Pong Song),219533,False,2007,45,0.753,0.759,8,-4.829,1,0.0487,0.0523,0.0,0.0263,0.706,114.999,"pop, latin" +T2,Heartbroken - Edit,178613,False,2017,59,0.716,0.757,11,-5.438,0,0.0329,0.0258,0.00392,0.16,0.877,139.986,Dance/Electronic +Robyn,With Every Heartbeat,254920,False,2005,36,0.477,0.865,4,-4.462,0,0.0395,0.00877,4.52e-05,0.271,0.413,120.872,"pop, Dance/Electronic" +Maroon 5,Makes Me Wonder,211080,True,2007,62,0.803,0.851,7,-2.429,1,0.0357,0.00485,0.000358,0.0577,0.881,113.996,pop +Fall Out Boy,"This Ain't A Scene, It's An Arms Race",212040,False,2007,68,0.435,0.887,9,-4.07,1,0.0636,0.000747,0.0,0.0327,0.408,199.935,rock +Paramore,Misery Business,211520,False,2007,72,0.517,0.906,1,-3.677,1,0.0735,0.00272,9.26e-06,0.113,0.731,172.977,"pop, rock" +Hinder,Better Than Me,223533,False,2019,30,0.451,0.682,2,-5.295,0,0.029,0.205,0.0,0.175,0.254,139.913,"pop, rock, metal" +Huey,"Pop, Lock & Drop It - Video Edit",261026,False,2006,52,0.723,0.644,11,-6.863,0,0.222,0.00308,0.0,0.352,0.645,144.09,"hip hop, pop" +Foo Fighters,The Pretender,269373,False,2007,78,0.433,0.959,9,-4.04,1,0.0431,0.000917,0.0,0.028,0.365,172.984,"rock, metal" +Plies,Shawty (feat. T Pain),255413,True,2007,61,0.444,0.708,4,-5.926,0,0.126,0.0419,0.0,0.0775,0.26,105.412,"hip hop, pop, R&B" +Kaiser Chiefs,Ruby,204200,False,2007,67,0.451,0.939,5,-2.82,0,0.0512,0.00673,4.85e-06,0.0774,0.448,93.416,rock +Mark Ronson,Stop Me (feat. Daniel Merriweather),232946,False,2007,46,0.626,0.906,0,-4.334,1,0.0403,0.00128,8.52e-06,0.137,0.346,118.035,pop +Bow Wow,Outta My System (feat. 
T-Pain & Johntá Austin),238266,False,2006,57,0.772,0.59,6,-6.957,1,0.183,0.0917,0.0,0.101,0.743,83.998,"hip hop, pop, R&B" +Jim Jones,We Fly High,236080,True,2006,0,0.685,0.8,1,-6.564,1,0.298,0.0847,0.0,0.092,0.483,120.226,"hip hop, pop" +The Red Jumpsuit Apparatus,Face Down,192000,False,2006,74,0.545,0.932,7,-2.189,0,0.0399,0.000665,0.0,0.127,0.464,92.956,pop +MIKA,Grace Kelly,187733,False,2006,69,0.675,0.828,0,-5.799,1,0.0454,0.0242,0.0102,0.364,0.669,122.229,pop +Christina Aguilera,Candyman,194213,False,2006,66,0.686,0.789,8,-4.713,0,0.23,0.0125,0.0147,0.142,0.72,172.976,pop +Sean Kingston,Beautiful Girls,225373,False,2007,78,0.762,0.661,1,-6.075,0,0.0687,0.15,0.0,0.256,0.769,130.009,"hip hop, pop, R&B" +Akon,Don't Matter,293066,True,2006,52,0.801,0.454,2,-6.035,1,0.0371,0.225,0.0,0.226,0.34,125.139,pop +James Blunt,1973,280026,False,2007,68,0.72,0.668,9,-7.928,1,0.0269,0.0652,0.00664,0.0789,0.768,123.007,pop +Carrie Underwood,Before He Cheats,199946,False,2005,76,0.519,0.749,6,-3.318,0,0.0405,0.271,0.0,0.119,0.29,147.905,"pop, country" +Just Jack,Starz In Their Eyes,295933,False,2006,52,0.67,0.8,3,-6.731,1,0.0652,0.00184,0.000356,0.466,0.656,123.802,Dance/Electronic +T-Pain,Buy U a Drank (Shawty Snappin') (feat. Yung Joc),227960,False,2007,2,0.451,0.55,1,-8.137,1,0.262,0.0108,0.0,0.0737,0.594,80.001,"hip hop, pop, R&B" +Bone Thugs-N-Harmony,I Tried,287480,True,2007,61,0.727,0.71,4,-6.142,1,0.0742,0.0104,0.0,0.107,0.378,81.995,"hip hop, pop" +Avril Lavigne,When You're Gone,240493,False,2007,68,0.457,0.719,4,-3.921,0,0.0323,0.191,0.0,0.228,0.168,142.034,pop +Rich Boy,Throw Some D's,263920,True,2007,57,0.697,0.86,11,-5.181,1,0.43,0.0368,0.0,0.0721,0.433,160.895,"hip hop, pop" +MIKA,Love Today,235173,False,2006,57,0.673,0.913,4,-4.981,1,0.0664,0.0304,0.00298,0.113,0.587,124.484,pop +Manic Street Preachers,Your Love Alone Is Not Enough (feat. 
Nina Persson),235693,False,2007,53,0.344,0.921,2,-3.049,1,0.0459,0.00143,1.13e-05,0.38,0.396,126.766,"rock, pop" +Elliott Yamin,Wait for You,261320,False,2007,58,0.764,0.487,0,-6.734,1,0.0281,0.25,0.0,0.184,0.352,116.027,set() +Wyclef Jean,"Sweetest Girl (Dollar Bill) (feat. Akon, Lil' Wayne & Niia)",241133,False,2007,56,0.733,0.744,1,-3.51,1,0.0553,0.0772,0.0,0.341,0.529,92.515,set() +Boys Like Girls,The Great Escape,206520,False,2007,62,0.423,0.94,1,-4.012,0,0.0635,0.00166,0.0,0.178,0.505,149.934,pop +Take That,Rule The World - Radio Edit,237760,False,2007,58,0.357,0.774,2,-4.226,1,0.034,0.0376,0.0,0.348,0.348,164.054,pop +J. Holiday,Bed,275106,False,2007,64,0.684,0.606,5,-7.268,0,0.0504,0.17,0.0,0.058,0.723,127.901,"hip hop, pop, R&B, Dance/Electronic" +Red Hot Chili Peppers,Snow (Hey Oh),334666,False,2006,79,0.427,0.9,11,-3.674,1,0.0499,0.116,1.75e-05,0.119,0.599,104.655,rock +Ludacris,Runaway Love,280680,True,2006,51,0.405,0.721,1,-6.97,1,0.283,0.329,0.0,0.252,0.815,92.854,"hip hop, pop" +Flo Rida,Low (feat. 
T-Pain),231400,False,2008,80,0.918,0.609,10,-5.64,0,0.0791,0.0928,0.0,0.139,0.304,128.008,"hip hop, pop" +Katy Perry,I Kissed A Girl,179640,False,2008,73,0.699,0.76,5,-3.173,1,0.0677,0.00223,0.0,0.132,0.696,129.996,pop +Rihanna,Take A Bow,229413,False,2008,74,0.697,0.467,9,-7.536,1,0.0715,0.248,0.0,0.0941,0.572,82.082,"hip hop, pop, R&B" +Alicia Keys,No One,253813,False,2007,77,0.644,0.549,1,-5.415,0,0.0285,0.0209,8.85e-06,0.134,0.167,90.04,"pop, R&B" +Timbaland,Apologize,184400,False,2007,74,0.653,0.604,8,-6.017,1,0.0278,0.0292,0.0,0.097,0.101,118.016,"hip hop, pop, R&B" +Ne-Yo,Miss Independent,232000,False,2008,71,0.668,0.673,1,-5.714,1,0.145,0.48,0.0,0.194,0.727,171.812,"pop, R&B" +Chris Brown,With You,252120,False,2007,70,0.662,0.693,3,-4.298,1,0.0698,0.134,0.0,0.145,0.655,86.009,"hip hop, pop, R&B" +Beyoncé,If I Were a Boy,249146,False,2008,66,0.632,0.518,6,-6.126,1,0.0313,0.107,0.0,0.354,0.427,90.007,"pop, R&B" +Kardinal Offishall,Dangerous,246053,True,2008,75,0.949,0.79,8,-5.957,1,0.0756,0.00437,0.0,0.0816,0.807,117.002,"hip hop, pop, R&B" +Madcon,Beggin (original version),216146,False,2007,70,0.715,0.8,4,-5.144,0,0.057,0.0271,0.0,0.0648,0.445,129.023,"hip hop, pop" +September,Cry for You,209800,False,2007,65,0.767,0.881,9,-3.988,1,0.0301,0.00133,0.000139,0.0551,0.961,130.018,"pop, Dance/Electronic" +Rihanna,Disturbia,238626,False,2008,76,0.707,0.813,11,-4.515,0,0.0571,0.0863,0.0,0.168,0.722,124.921,"hip hop, pop, R&B" +Guru Josh Project,Infinity 2008,190013,False,2011,1,0.493,0.849,7,-6.139,0,0.0576,0.000337,0.00672,0.355,0.483,127.999,hip hop +Britney Spears,Break the Ice,196053,False,2007,61,0.712,0.911,5,-3.866,0,0.0445,0.689,8.02e-06,0.107,0.85,117.533,pop +Lil Wayne,Lollipop,299333,True,2008,69,0.829,0.428,0,-9.469,1,0.0831,0.056,0.00413,0.137,0.45,148.075,"hip hop, pop" +Basshunter,Now You're Gone - Video Edit,148186,False,2008,63,0.639,0.976,1,-5.503,1,0.354,0.0213,0.0,0.0856,0.354,147.99,pop +Kanye West,Flashing 
Lights,237506,True,2007,52,0.639,0.628,6,-7.578,0,0.0399,0.0381,0.0,0.386,0.43,90.482,hip hop +Madonna,4 Minutes (feat. Justin Timberlake & Timbaland),189693,False,2009,71,0.753,0.931,2,-4.922,1,0.0652,0.00994,0.00696,0.234,0.767,113.029,pop +Katy Perry,Hot N Cold,220226,False,2008,73,0.706,0.841,7,-3.956,1,0.0418,7.95e-05,0.0,0.0688,0.861,132.032,pop +Rihanna,Don't Stop The Music,267080,False,2008,77,0.835,0.669,6,-5.582,0,0.0643,0.0336,6.92e-05,0.0535,0.542,122.668,"hip hop, pop, R&B" +Snoop Dogg,Sensual Seduction,245520,False,2007,56,0.756,0.829,11,-3.973,1,0.0682,0.003,3.46e-05,0.0578,0.504,120.163,"hip hop, pop" +Akon,Right Now (Na Na Na),240746,False,2008,69,0.83,0.857,8,-4.194,0,0.152,0.262,0.0,0.413,0.607,137.982,pop +Britney Spears,Womanizer,224400,False,2008,76,0.724,0.695,11,-5.226,1,0.0622,0.073,0.0,0.0889,0.235,139.0,pop +Eric Prydz,Pjanoo - Radio Edit,157432,False,2008,65,0.605,0.874,7,-4.949,0,0.0295,0.000563,0.812,0.112,0.836,125.99,"pop, Dance/Electronic" +Kelly Rowland,Work,207920,False,2010,34,0.888,0.823,10,-6.351,0,0.0538,0.00499,0.0,0.291,0.623,102.757,"hip hop, pop, R&B" +Jeezy,Put On,321293,True,2008,69,0.654,0.77,9,-6.091,0,0.0353,0.00342,0.0,0.0887,0.272,137.616,"hip hop, pop" +Madonna,Give It 2 Me,287906,False,2008,57,0.837,0.954,8,-3.512,0,0.0414,0.0933,0.000405,0.143,0.972,127.019,pop +The Pussycat Dolls,When I Grow Up,245680,False,2008,72,0.671,0.685,11,-5.762,0,0.05,0.00165,0.0,0.398,0.368,118.449,"pop, R&B" +will.i.am,Heartbreaker,327880,True,2007,46,0.788,0.549,9,-8.793,0,0.0732,0.0352,0.127,0.147,0.449,120.051,"hip hop, pop" +Soulja Boy,Crank That (Soulja Boy),221933,False,2007,74,0.736,0.74,0,-2.18,1,0.0786,0.515,0.0,0.0468,0.803,140.141,"hip hop, pop" +Britney Spears,Piece of Me,212106,False,2007,64,0.769,0.638,11,-5.054,1,0.216,0.0902,0.0,0.0857,0.782,115.007,pop +T.I.,Live Your Life,338853,True,2008,75,0.375,0.862,11,-3.363,0,0.255,0.071,0.0,0.211,0.478,159.841,"hip hop, pop" 
+Ne-Yo,Closer,234360,False,2008,65,0.709,0.745,4,-6.437,0,0.0738,0.0225,5.2e-05,0.154,0.576,126.027,"pop, R&B" +Lil Wayne,A Milli,221840,True,2008,69,0.674,0.695,6,-8.636,0,0.278,0.0387,0.00202,0.194,0.773,151.486,"hip hop, pop" +Colby O'Donis,What You Got,243013,False,2008,61,0.775,0.641,1,-6.718,1,0.038,0.0327,0.0,0.18,0.305,119.974,"hip hop, pop, R&B" +Miley Cyrus,See You Again,190453,False,2007,0,0.692,0.911,9,-5.098,0,0.177,0.0149,7.34e-05,0.112,0.801,138.975,pop +Basshunter,All I Ever Wanted - Radio Edit,176453,False,2008,65,0.645,0.984,4,-7.051,1,0.0508,0.164,0.00701,0.164,0.553,144.954,pop +Chris Brown,Kiss Kiss (feat. T-Pain),250666,False,2007,68,0.729,0.658,10,-3.386,0,0.225,0.0506,0.0,0.0693,0.551,140.043,"hip hop, pop, R&B" +Finger Eleven,Paralyzer,208106,False,2007,73,0.644,0.939,11,-3.486,0,0.0456,0.157,0.0,0.233,0.861,106.031,"rock, metal" +Alesha Dixon,The Boy Does Nothing,210680,False,2008,51,0.632,0.972,3,-2.423,0,0.121,0.121,7.74e-05,0.0317,0.845,87.0,pop +Dizzee Rascal,Dance Wiv Me - Radio Edit,204093,False,2011,68,0.878,0.746,11,-4.281,1,0.0451,0.0476,0.0,0.154,0.792,111.996,"hip hop, pop, Dance/Electronic" +Gabriella Cilmi,Sweet About Me,202133,False,2008,56,0.671,0.701,6,-5.035,1,0.0288,0.0607,2.7e-06,0.163,0.585,131.977,R&B +Jack White,Another Way to Die,262240,False,2007,56,0.486,0.765,4,-4.23,0,0.128,0.0239,3.02e-05,0.11,0.386,141.933,"rock, blues" +Plies,Hypnotized (feat. 
Akon),188493,True,2007,66,0.84,0.571,10,-7.421,1,0.0797,0.16,0.0,0.406,0.617,116.59,"hip hop, pop, R&B" +The Pussycat Dolls,I Hate This Part,218400,False,2008,64,0.756,0.612,5,-4.371,1,0.0317,0.0659,0.0,0.274,0.452,111.572,"pop, R&B" +Sean Kingston,Take You There,236693,False,2008,65,0.752,0.921,5,-4.321,0,0.0696,0.0182,0.0,0.256,0.634,115.033,"hip hop, pop, R&B" +Natasha Bedingfield,Pocketful of Sunshine,203440,False,2008,62,0.726,0.881,9,-3.892,0,0.0391,0.203,0.0,0.108,0.682,110.019,pop +The Offspring,"You're Gonna Go Far, Kid",177826,True,2008,78,0.55,0.917,0,-3.159,1,0.0638,0.00428,0.0,0.197,0.601,126.115,"rock, pop, metal" +Kanye West,Homecoming,203493,True,2007,47,0.667,0.747,1,-7.059,1,0.189,0.337,0.0,0.115,0.918,86.917,hip hop +Estelle,American Boy,284733,True,2008,78,0.727,0.729,0,-2.99,1,0.326,0.171,0.0,0.07,0.512,117.932,R&B +Saving Abel,Addicted,222826,True,2008,69,0.512,0.864,7,-4.146,1,0.0338,0.000821,0.0,0.0982,0.527,138.018,"rock, metal" +Pendulum,Propane Nightmares,313346,False,2008,57,0.356,0.966,7,-4.13,0,0.0825,0.000262,0.148,0.192,0.215,173.992,Dance/Electronic +The Game,My Life,320893,True,2008,60,0.673,0.766,11,-5.018,1,0.349,0.0724,0.0,0.0563,0.382,148.113,"hip hop, pop" +Danity Kane,Damaged,244266,False,2008,57,0.774,0.653,8,-5.158,0,0.05,0.0912,0.0,0.151,0.818,120.01,"pop, R&B" +P!nk,So What,215160,True,2008,76,0.534,0.87,11,-3.078,0,0.0425,0.000334,0.0,0.241,0.462,126.019,pop +Kanye West,Love Lockdown,270306,False,2008,66,0.76,0.524,1,-7.67,0,0.0323,0.0542,0.5,0.112,0.112,119.603,hip hop +Gym Class Heroes,Cookie Jar (feat. 
The-Dream),216317,True,2008,54,0.687,0.668,5,-4.061,1,0.0282,0.0253,0.0,0.744,0.418,113.053,"hip hop, pop" +M.I.A.,Paper Planes,205200,False,2007,1,0.447,0.848,2,-6.175,1,0.222,0.033,7.45e-05,0.65,0.485,172.247,"pop, rock" +3 Doors Down,It's Not My Time,241960,False,2008,56,0.529,0.934,0,-4.808,1,0.0602,0.00153,5.16e-06,0.118,0.282,127.962,"pop, rock, metal" +Kylie Minogue,Wow,190973,False,2007,47,0.654,0.884,9,-5.466,1,0.202,0.151,0.000268,0.379,0.851,124.072,"pop, Dance/Electronic" +Mariah Carey,Touch My Body,204733,False,2008,0,0.715,0.665,4,-7.736,0,0.0502,0.0996,0.0,0.224,0.84,78.502,"pop, R&B" +MGMT,Kids,302840,False,2007,77,0.451,0.931,9,-3.871,1,0.0719,0.00076,0.0049,0.361,0.172,122.961,rock +The Ting Tings,Shut Up and Let Me Go,171226,False,2007,55,0.852,0.927,7,-4.497,1,0.0581,0.0109,0.00268,0.054,0.887,107.993,"pop, rock, Dance/Electronic" +Lil Wayne,Got Money,244626,True,2008,58,0.694,0.661,7,-4.847,1,0.118,0.00188,0.0,0.676,0.702,82.48,"hip hop, pop" +Santana,Into the Night (feat. Chad Kroeger),222440,False,2007,57,0.595,0.844,7,-4.678,1,0.0331,0.0137,0.0,0.234,0.603,127.981,"rock, blues, latin" +The-Dream,I Luv Your Girl,267866,True,2007,62,0.723,0.322,8,-9.702,1,0.0341,0.014,0.0,0.119,0.0406,90.063,"hip hop, pop, R&B" +Wiley,Wearing My Rolex - Radio Edit,170480,False,2008,59,0.876,0.716,1,-6.884,1,0.135,0.0468,0.000482,0.0667,0.755,131.942,"hip hop, Dance/Electronic" +Usher,Love in This Club (feat. Young Jeezy),259720,False,2008,71,0.573,0.712,0,-5.976,1,0.0732,0.0572,0.0,0.167,0.346,140.012,"hip hop, pop, R&B" +Sam Sparro,Black & Gold - Radio Edit,212360,False,2008,53,0.383,0.703,4,-5.65,0,0.0763,0.00116,2.39e-05,0.112,0.42,135.968,"pop, Dance/Electronic" +"H ""two"" O",What's It Gonna Be (feat. 
Platnum),207476,False,2008,55,0.733,0.9,9,-4.758,1,0.0341,0.0085,0.0218,0.151,0.937,139.61,Dance/Electronic +The Ting Tings,That's Not My Name,310573,False,2008,51,0.755,0.901,9,-3.152,1,0.0893,0.0451,0.0373,0.363,0.959,145.042,"pop, rock, Dance/Electronic" +Plies,"Bust It Baby, Pt. 2 (feat. Ne-Yo)",240760,True,2008,61,0.648,0.801,5,-7.24,0,0.167,0.154,0.0,0.339,0.807,78.946,"hip hop, pop, R&B" +James Morrison,Broken Strings,250453,False,2008,70,0.57,0.717,1,-4.914,1,0.029,0.00967,0.0,0.0833,0.321,111.91,"pop, R&B" +Leona Lewis,Run,314720,False,2007,62,0.285,0.462,5,-6.166,0,0.0291,0.428,5.61e-06,0.122,0.0923,142.365,"pop, R&B" +Metro Station,Shake It,179946,False,2007,67,0.618,0.955,4,-3.836,1,0.0798,0.00221,3.09e-06,0.486,0.79,150.034,"pop, rock" +T.I.,Whatever You Like,249533,True,2008,74,0.68,0.687,9,-6.162,0,0.0709,0.0161,0.0,0.261,0.467,150.053,"hip hop, pop" +Trey Songz,Can't Help but Wait,206413,False,2007,62,0.699,0.699,8,-5.564,0,0.0737,0.358,0.0,0.0912,0.611,94.977,"hip hop, pop, R&B" +OneRepublic,Stop And Stare,223853,False,2007,59,0.492,0.859,4,-4.274,1,0.0332,0.0659,0.0,0.0756,0.251,92.474,pop +Alexandra Burke,Hallelujah,217826,False,2009,63,0.177,0.425,2,-6.211,0,0.0291,0.654,0.0,0.195,0.0942,182.571,pop +Mary J. 
Blige,Just Fine,242133,False,2007,64,0.923,0.795,11,-3.61,0,0.117,0.0394,0.000565,0.101,0.588,123.021,"pop, R&B" +Jordin Sparks,One Step At a Time,205160,False,2007,56,0.766,0.692,1,-4.672,1,0.0289,0.0825,0.0,0.0384,0.691,102.028,"pop, R&B" +Coldplay,Viva La Vida,242373,False,2008,80,0.486,0.617,5,-7.115,0,0.0287,0.0954,3.23e-06,0.109,0.417,138.015,"rock, pop" +Kings of Leon,Sex on Fire,203346,False,2008,80,0.542,0.905,9,-5.653,1,0.054,0.00172,0.0104,0.136,0.374,153.398,rock +Kanye West,Good Life,207000,True,2007,45,0.439,0.808,1,-6.881,1,0.346,0.00305,0.0,0.439,0.487,82.962,hip hop +Duffy,Mercy,219920,False,2008,69,0.793,0.859,0,-3.774,1,0.0332,0.266,0.000356,0.133,0.964,129.911,"pop, R&B" +David Archuleta,Crush,213520,False,2008,67,0.57,0.664,0,-4.718,0,0.0322,0.00836,1.2e-05,0.0719,0.487,162.084,pop +J. Holiday,Suffocate - Superclean,220053,False,2007,58,0.458,0.445,9,-8.391,0,0.361,0.64,0.0,0.118,0.447,82.82,"hip hop, pop, R&B, Dance/Electronic" +Gavin DeGraw,In Love With a Girl,206000,False,2008,51,0.438,0.906,3,-4.8,1,0.0438,0.0191,0.0,0.373,0.684,161.905,"pop, Folk/Acoustic" +Jordin Sparks,Tattoo,233466,False,2007,54,0.566,0.766,2,-5.036,1,0.0399,0.431,0.0,0.101,0.547,168.005,"pop, R&B" +Coldplay,Violet Hill,222653,False,2008,63,0.33,0.58,1,-7.875,0,0.0374,0.0614,0.00129,0.115,0.11,76.093,"rock, pop" +Ray J,Sexy Can I feat. Yung Berg,204040,True,2008,1,0.575,0.684,1,-6.007,1,0.325,0.299,0.0,0.614,0.926,172.155,"hip hop, pop, R&B" +Jennifer Hudson,Spotlight,250106,False,2008,60,0.707,0.724,11,-3.887,0,0.051,0.0448,0.0,0.072,0.659,108.984,"pop, R&B, easy listening" +John Legend,Green Light (feat. André 3000),284186,False,2008,55,0.622,0.882,9,-5.59,0,0.119,0.0912,0.0,0.216,0.762,77.506,"pop, R&B" +Jonas Brothers,Burnin' Up,175093,False,2008,68,0.667,0.954,2,-3.462,1,0.0817,0.0296,0.0,0.331,0.807,114.03,pop +Lupe Fiasco,Superstar (feat. 
Matthew Santos),289000,False,2007,64,0.572,0.822,11,-6.015,0,0.361,0.207,0.0,0.359,0.472,94.812,"hip hop, pop" +Chris Brown,Forever,278573,False,2008,74,0.672,0.82,11,-4.456,1,0.0459,0.0368,0.000188,0.184,0.438,120.005,"hip hop, pop, R&B" +Adele,Chasing Pavements,210506,False,2008,1,0.614,0.47,5,-6.09,0,0.0255,0.291,0.0,0.111,0.329,80.045,"pop, R&B" +Leona Lewis,Better in Time,234173,False,2007,67,0.584,0.7,6,-4.251,1,0.0506,0.512,2.43e-05,0.13,0.549,163.953,"pop, R&B" +Fergie,Clumsy,240426,False,2006,58,0.731,0.563,2,-4.046,1,0.131,0.191,0.00042,0.296,0.452,184.009,"pop, R&B" +T-Pain,Can't Believe It (feat. Lil' Wayne),273826,False,2008,0,0.648,0.516,10,-8.869,0,0.049,0.00179,0.0,0.0772,0.0756,89.828,"hip hop, pop, R&B" +Black Eyed Peas,I Gotta Feeling,289133,False,2009,80,0.743,0.766,0,-6.375,1,0.0265,0.0873,0.0,0.509,0.61,127.96,"hip hop, pop" +Lady Gaga,Poker Face,237200,False,2008,77,0.851,0.806,4,-4.62,1,0.0787,0.118,1.64e-06,0.121,0.787,118.999,pop +Beyoncé,Halo,261640,False,2008,74,0.508,0.72,11,-5.908,0,0.0628,0.272,0.0,0.0563,0.472,79.983,"pop, R&B" +David Guetta,Sexy Bitch (feat. 
Akon),195853,True,2010,75,0.813,0.627,11,-5.018,0,0.0486,0.0771,0.000616,0.131,0.801,130.011,"hip hop, pop, Dance/Electronic" +Flo Rida,Right Round,204640,False,2009,74,0.72,0.672,7,-6.852,1,0.0551,0.009,0.0,0.232,0.705,124.986,"hip hop, pop" +Taylor Swift,Love Story,236266,False,2008,74,0.617,0.741,2,-3.97,1,0.0311,0.131,0.0,0.0772,0.306,118.984,pop +Pitbull,Hotel Room Service,238506,False,2009,63,0.849,0.599,6,-8.164,1,0.227,0.00301,0.000249,0.0763,0.761,126.003,"hip hop, pop, latin" +Britney Spears,Circus,192360,False,2008,74,0.791,0.733,6,-5.215,0,0.052,0.147,0.000381,0.0713,0.761,114.98,pop +Lady Gaga,LoveGame,216333,False,2008,69,0.894,0.678,6,-5.611,0,0.0523,0.00569,2.43e-06,0.317,0.844,105.024,pop +Skillet,Monster,178013,False,2009,75,0.64,0.957,8,-2.336,1,0.0741,0.0431,0.0,0.0789,0.692,134.992,"rock, pop, metal" +The Prodigy,Omen,216026,False,2009,57,0.545,0.953,7,-5.172,1,0.0441,0.000941,0.117,0.281,0.558,140.002,"rock, pop, Dance/Electronic" +Black Eyed Peas,Meet Me Halfway,284373,False,2009,72,0.798,0.629,11,-6.857,0,0.0735,0.00474,2.17e-05,0.324,0.4,130.0,"hip hop, pop" +Kanye West,Heartless,211000,False,2008,82,0.79,0.647,10,-5.983,0,0.136,0.0515,0.0,0.248,0.654,87.999,hip hop +Kid Cudi,Day 'N' Nite (Nightmare),221240,False,2009,72,0.88,0.443,11,-6.359,0,0.0653,0.462,7.13e-06,0.122,0.803,138.018,hip hop +Lady Gaga,Just Dance,241933,False,2008,76,0.822,0.739,1,-4.541,0,0.0311,0.0264,4.26e-05,0.181,0.745,118.992,pop +Britney Spears,If U Seek Amy,216520,False,2008,69,0.717,0.587,4,-7.296,0,0.0339,0.0192,0.0,0.0523,0.544,129.954,pop +Pitbull,I Know You Want Me (Calle Ocho),237120,False,2009,59,0.825,0.743,2,-5.995,1,0.149,0.0142,2.12e-05,0.237,0.8,127.045,"hip hop, pop, latin" +Mariah Carey,Obsessed,242200,False,2009,66,0.742,0.468,10,-5.557,0,0.0625,0.0465,0.0,0.826,0.369,86.443,"pop, R&B" +Beyoncé,Sweet Dreams,208066,False,2008,48,0.694,0.825,1,-5.986,1,0.111,0.0895,1.86e-05,0.0618,0.788,121.949,"pop, R&B" +Agnes,Release 
Me,256213,False,2009,65,0.621,0.923,2,-3.124,0,0.0321,0.00645,7.28e-05,0.108,0.716,127.973,"pop, Dance/Electronic" +Black Eyed Peas,Boom Boom Pow,251440,True,2009,68,0.867,0.857,9,-5.892,1,0.0663,0.13,0.00171,0.13,0.402,130.048,"hip hop, pop" +Shakira,She Wolf,188866,False,2009,68,0.865,0.69,7,-7.448,1,0.0443,0.285,0.0162,0.225,0.867,121.983,"pop, latin" +Lady Gaga,Paparazzi,208306,False,2008,70,0.762,0.692,5,-3.973,0,0.0438,0.113,0.0,0.094,0.397,114.906,pop +JAY-Z,Empire State Of Mind,276920,True,2009,82,0.491,0.956,11,-1.538,1,0.392,0.0295,0.0,0.46,0.811,173.585,hip hop +OneRepublic,All The Right Moves,238000,False,2009,65,0.529,0.948,0,-3.527,1,0.0474,0.258,9.35e-06,0.283,0.65,146.024,pop +Jeremih,Birthday Sex,226506,False,2009,67,0.677,0.523,7,-5.603,0,0.0439,0.295,0.0,0.15,0.446,60.019,"hip hop, pop, R&B" +Cascada,Evacuate The Dancefloor,207200,False,2009,63,0.762,0.702,0,-5.87,1,0.0432,0.0167,0.0,0.314,0.898,127.029,"hip hop, pop, Dance/Electronic" +Linkin Park,New Divide,268613,False,2009,68,0.493,0.808,5,-3.365,0,0.0362,0.000235,0.0,0.0983,0.38,117.971,"rock, metal" +Kevin Rudolf,Let It Rock,231173,True,2008,66,0.607,0.783,7,-4.41,1,0.0397,0.000683,0.0,0.0678,0.434,113.172,"hip hop, pop" +JAY-Z,Run This Town,267520,True,2009,75,0.632,0.924,1,-1.802,1,0.29,0.281,0.0,0.263,0.441,86.844,hip hop +Lady Gaga,Bad Romance,294573,True,2009,80,0.696,0.921,0,-3.755,1,0.0363,0.00314,5.24e-05,0.0842,0.714,119.001,pop +T.I.,Live Your Life,338853,True,2008,75,0.375,0.862,11,-3.363,0,0.255,0.071,0.0,0.211,0.478,159.841,"hip hop, pop" +Eminem,We Made You,269613,False,2009,63,0.924,0.853,2,-1.203,1,0.0792,0.107,1.45e-06,0.129,0.67,114.003,hip hop +Dizzee Rascal,Bonkers,177573,False,2011,38,0.624,0.977,11,-3.34,0,0.22,0.00615,0.0,0.253,0.74,126.127,"hip hop, pop, Dance/Electronic" +Ciara,Love Sex Magic (feat. 
Justin Timberlake),220426,False,2009,58,0.893,0.666,10,-5.089,0,0.138,0.0206,0.0,0.342,0.874,107.011,"pop, R&B" +Rihanna,Russian Roulette,227533,True,2009,64,0.48,0.486,6,-5.754,0,0.0447,0.046,0.0,0.107,0.265,80.051,"hip hop, pop, R&B" +Soulja Boy,Turn My Swag On,206333,False,2008,59,0.546,0.712,3,-3.104,0,0.0269,0.000969,0.0,0.406,0.489,150.154,"hip hop, pop" +The xx,Intro,127920,False,2009,2,0.617,0.778,9,-8.871,0,0.027,0.459,0.925,0.128,0.152,100.363,rock +Jay Sean,Down,212106,False,2009,3,0.657,0.695,2,-4.493,1,0.0321,0.0108,0.0,0.0822,0.683,65.997,"hip hop, pop, R&B" +Keri Hilson,Turnin Me On - Original Dirty,248066,True,2008,41,0.661,0.584,1,-5.09,1,0.061,0.00293,4.82e-06,0.134,0.0949,159.873,"pop, R&B" +David Guetta,When Love Takes Over (feat. Kelly Rowland),191000,False,2010,69,0.675,0.862,11,-4.614,1,0.0253,0.0165,0.000427,0.169,0.498,129.967,"hip hop, pop, Dance/Electronic" +Cheryl,Fight For This Love,223253,False,2009,65,0.739,0.741,7,-5.873,1,0.0912,0.013,0.0,0.0679,0.727,122.988,"pop, Dance/Electronic" +Soulja Boy,Kiss Me Thru The Phone,193386,True,2008,76,0.758,0.712,7,-3.781,1,0.112,0.0185,0.0,0.0677,0.795,149.998,"hip hop, pop" +Eminem,Crack A Bottle,297520,True,2009,70,0.516,0.874,9,-2.571,1,0.186,0.0864,0.0,0.173,0.391,169.561,hip hop +P!nk,Sober,251440,False,2010,46,0.614,0.792,6,-4.907,1,0.0299,0.0653,0.0,0.195,0.407,91.066,pop +Tinchy Stryder,Number 1,212773,False,2009,59,0.581,0.824,0,-3.912,0,0.0377,0.0105,0.0,0.483,0.747,114.912,"pop, Dance/Electronic" +Pixie Lott,"Mama Do (Uh Oh, Uh Oh)",196520,False,2009,54,0.451,0.873,3,-4.711,0,0.19,0.171,0.0,0.327,0.589,119.918,"pop, Dance/Electronic" +JLS,Beat Again - Radio Edit,196640,False,2009,0,0.894,0.93,11,-4.577,1,0.0712,0.0355,0.0,0.0759,0.968,120.02,pop +3OH!3,DONTTRUSTME,192573,True,2008,70,0.791,0.713,5,-3.742,0,0.254,0.0163,0.0,0.189,0.514,130.012,"hip hop, pop, rock" +A.R. Rahman,Jai Ho! 
(You Are My Destiny),222400,False,2009,65,0.657,0.941,8,-3.919,0,0.061,0.0476,0.0,0.0797,0.879,136.202,set() +The Pussycat Dolls,I Hate This Part,218400,False,2008,64,0.756,0.612,5,-4.371,1,0.0317,0.0659,0.0,0.274,0.452,111.572,"pop, R&B" +Akon,Beautiful,312986,False,2008,65,0.74,0.945,0,-4.442,0,0.0889,0.123,0.0,0.112,0.629,130.015,pop +Alexandra Burke,Bad Boys (feat. Flo Rida),206480,False,2009,57,0.67,0.866,1,-3.684,1,0.0538,0.0115,0.0,0.358,0.636,140.029,pop +Mario,Break Up,249026,True,2009,51,0.44,0.517,0,-7.355,1,0.363,0.428,0.0,0.173,0.475,73.881,"pop, R&B" +Drake,Forever,357706,True,2009,73,0.457,0.906,5,-2.278,0,0.342,0.249,0.0,0.182,0.54,104.02,"hip hop, pop, R&B" +Selena Gomez & The Scene,Naturally,202586,False,2009,63,0.605,0.902,10,-5.406,0,0.0511,0.0185,1.01e-06,0.0534,0.875,132.612,"pop, Dance/Electronic" +Kanye West,Love Lockdown,270306,False,2008,66,0.76,0.524,1,-7.67,0,0.0323,0.0542,0.5,0.112,0.112,119.603,hip hop +The Veronicas,Untouched,255360,False,2005,58,0.557,0.783,6,-4.893,0,0.234,0.0171,0.0172,0.151,0.442,177.008,"pop, Dance/Electronic" +Fabolous,Throw It In The Bag,231573,True,2009,59,0.789,0.55,11,-7.423,0,0.0473,0.174,0.0,0.0614,0.696,172.049,"hip hop, pop, R&B" +Jason Derulo,Whatcha Say,221253,False,2010,70,0.615,0.711,11,-5.507,1,0.0779,0.0444,0.0,0.145,0.711,144.036,"hip hop, pop" +Lily Allen,Not Fair,201213,True,2009,54,0.719,0.861,5,-6.982,1,0.0402,0.0447,0.00989,0.219,0.948,121.491,pop +T.I.,Dead And Gone,299746,True,2008,68,0.713,0.746,0,-4.99,1,0.259,0.0402,0.0,0.601,0.47,135.021,"hip hop, pop" +Sean Kingston,Fire Burning,239986,False,2009,71,0.839,0.804,1,-2.513,1,0.0329,0.0192,0.0,0.331,0.888,122.973,"hip hop, pop, R&B" +Empire of the Sun,Walking On A Dream,198440,False,2008,77,0.871,0.701,5,-5.594,0,0.0458,0.257,7.52e-06,0.0589,0.716,126.975,"rock, pop, Dance/Electronic" +Cobra Starship,Good Girls Go Bad (feat. 
Leighton Meester),196413,False,2009,65,0.594,0.874,0,-3.716,1,0.0815,0.0116,0.0,0.549,0.628,119.964,"pop, Dance/Electronic" +Nickelback,If Today Was Your Last Day,249066,False,2008,66,0.485,0.911,3,-5.749,1,0.0355,8.67e-05,0.0,0.092,0.568,89.956,"rock, metal" +La Roux,Bulletproof,205733,False,2009,71,0.674,0.882,3,-2.771,0,0.0477,0.000441,6.47e-05,0.068,0.682,123.016,"pop, Dance/Electronic" +Kasabian,Fire,252279,False,2009,64,0.525,0.742,10,-5.643,1,0.0311,0.0874,0.149,0.117,0.179,117.029,rock +Calvin Harris,I'm Not Alone - Radio Edit,210973,False,2009,55,0.597,0.684,7,-6.614,1,0.0321,0.00481,0.105,0.317,0.435,130.99,"hip hop, pop, Dance/Electronic" +Justin Bieber,One Time,215866,False,2009,71,0.691,0.853,1,-2.528,0,0.0372,0.0631,7.13e-05,0.082,0.762,145.999,pop +Jamie Foxx,Blame It,289746,True,2008,65,0.673,0.614,7,-5.426,1,0.103,0.0782,0.0,0.139,0.362,176.052,"hip hop, pop, R&B" +La Roux,In For The Kill,248626,False,2009,59,0.629,0.969,8,-0.276,0,0.0455,0.00184,0.0,0.122,0.905,150.01,"pop, Dance/Electronic" +Muse,Uprising,304840,False,2009,75,0.602,0.905,2,-4.046,1,0.0775,0.000202,0.064,0.117,0.411,128.019,rock +Nickelback,Gotta Be Somebody,252653,False,2008,62,0.536,0.89,0,-5.222,1,0.0601,0.000354,0.00165,0.133,0.205,115.998,"rock, metal" +James Morrison,Broken Strings,250453,False,2008,70,0.57,0.717,1,-4.914,1,0.029,0.00967,0.0,0.0833,0.321,111.91,"pop, R&B" +T.I.,Whatever You Like,249533,True,2008,74,0.68,0.687,9,-6.162,0,0.0709,0.0161,0.0,0.261,0.467,150.053,"hip hop, pop" +Dizzee Rascal,Holiday,220626,False,2011,60,0.545,0.918,0,-1.925,1,0.0448,0.0571,0.0,0.0415,0.855,117.985,"hip hop, pop, Dance/Electronic" +Beyoncé,Single Ladies (Put a Ring on It),193213,False,2008,67,0.426,0.584,1,-5.293,1,0.296,0.0383,0.0,0.188,0.272,193.437,"pop, R&B" +Boys Like Girls,Love Drunk,226706,False,2009,64,0.44,0.976,10,-3.17,1,0.141,0.00177,3.39e-06,0.16,0.412,150.005,pop +The-Dream,Rockin' That 
Shit,221920,True,2009,62,0.672,0.62,8,-4.865,1,0.0422,0.123,0.0,0.362,0.627,78.005,"hip hop, pop, R&B" +OneRepublic,Secrets,224693,False,2009,74,0.516,0.764,2,-6.223,1,0.0366,0.0717,0.0,0.115,0.376,148.021,pop +JLS,Everybody in Love,195586,False,2013,39,0.705,0.783,5,-5.971,0,0.0327,0.0703,0.0,0.36,0.656,140.022,pop +Jason Aldean,Big Green Tractor,204200,False,2009,50,0.616,0.596,4,-5.035,1,0.0289,0.654,0.0,0.218,0.58,137.077,country +The Fray,Never Say Never,256613,False,2009,67,0.23,0.492,8,-5.767,1,0.0317,0.568,8.18e-06,0.176,0.262,160.139,pop +Lily Allen,The Fear,207120,True,2009,53,0.661,0.847,10,-6.948,1,0.0404,0.419,8.82e-05,0.107,0.522,134.002,pop +Kings of Leon,Sex on Fire,203346,False,2008,80,0.542,0.905,9,-5.653,1,0.054,0.00172,0.0104,0.136,0.374,153.398,rock +Drake,Best I Ever Had,258760,True,2010,54,0.431,0.894,5,-2.673,0,0.33,0.0951,0.0,0.188,0.605,162.161,"hip hop, pop, R&B" +Kelly Clarkson,Already Gone,281560,False,2009,56,0.209,0.872,9,-2.996,1,0.0757,0.217,0.0,0.0768,0.294,78.139,"pop, R&B" +Miley Cyrus,Party In The U.S.A.,202066,False,2009,79,0.652,0.698,10,-4.667,0,0.042,0.00112,0.000115,0.0886,0.47,96.021,pop +Jordin Sparks,Battlefield,241920,False,2009,57,0.613,0.634,2,-3.472,1,0.0339,0.0178,0.0,0.0639,0.37,144.953,"pop, R&B" +Ne-Yo,Mad,254533,False,2008,64,0.731,0.644,0,-5.348,1,0.0343,0.665,0.0,0.11,0.691,129.94,"pop, R&B" +Green Day,21 Guns,321093,False,2009,73,0.268,0.742,5,-4.939,1,0.0355,0.0518,0.0,0.626,0.416,159.779,rock +Keri Hilson,Knock You Down,326186,False,2009,57,0.588,0.877,8,-4.78,1,0.16,0.00952,0.0,0.171,0.645,155.165,"pop, R&B" +Taylor Swift,You Belong With Me,231146,False,2008,55,0.687,0.771,6,-4.424,1,0.0384,0.164,2.46e-05,0.112,0.445,129.964,pop +Shinedown,Second Chance,222066,False,2008,67,0.46,0.796,0,-4.501,0,0.0333,0.00107,0.0,0.106,0.182,100.011,"rock, metal" +Asher Roth,I Love College,241933,True,2009,62,0.713,0.826,8,-4.075,1,0.243,0.0897,0.0,0.421,0.664,86.444,"hip hop, pop" +Katy Perry,Waking Up In 
Vegas,199186,False,2008,49,0.524,0.878,5,-3.108,0,0.0346,0.0012,0.0,0.098,0.59,130.989,pop +Kelly Clarkson,My Life Would Suck Without You,211493,False,2009,68,0.526,0.882,9,-4.006,1,0.0509,0.0014,0.0,0.144,0.424,144.982,"pop, R&B" +Mumford & Sons,Little Lion Man,245173,True,2009,68,0.517,0.492,5,-8.05,1,0.0272,0.0275,3.19e-05,0.0873,0.455,138.585,"Folk/Acoustic, rock, pop" +The Fray,You Found Me,241853,False,2009,73,0.338,0.803,8,-5.412,0,0.0413,0.0187,0.0,0.136,0.4,151.994,pop +Bruno Mars,Just the Way You Are,220734,False,2010,77,0.635,0.841,5,-5.379,1,0.0422,0.0134,0.0,0.0622,0.424,109.021,pop +Eminem,Love The Way You Lie,263373,True,2010,81,0.749,0.925,10,-5.034,1,0.227,0.241,0.0,0.52,0.641,86.989,hip hop +Lady Gaga,Bad Romance,294573,True,2009,80,0.696,0.921,0,-3.755,1,0.0363,0.00314,5.24e-05,0.0842,0.714,119.001,pop +Stromae,Alors on danse - Radio Edit,206066,False,2010,77,0.791,0.59,1,-9.206,0,0.0793,0.0994,0.00203,0.065,0.714,119.951,pop +Kesha,TiK ToK,199693,False,2010,80,0.755,0.837,2,-2.718,0,0.142,0.0991,0.0,0.289,0.714,120.028,"pop, Dance/Electronic" +David Guetta,Memories (feat. 
Kid Cudi),210853,False,2010,76,0.546,0.916,8,-3.932,1,0.255,0.00144,4.34e-06,0.251,0.375,129.983,"hip hop, pop, Dance/Electronic" +Taio Cruz,Dynamite,202613,False,2010,80,0.751,0.783,4,-3.724,1,0.0859,0.00379,0.0,0.036,0.816,119.975,"hip hop, pop" +Rihanna,Only Girl (In The World),235493,False,2010,73,0.789,0.716,11,-4.241,0,0.0432,0.129,1.07e-05,0.069,0.611,125.906,"hip hop, pop, R&B" +Katy Perry,California Gurls,234653,False,2012,72,0.791,0.754,0,-3.729,1,0.0569,0.00446,0.0,0.163,0.425,125.014,pop +Kesha,Take It Off,215200,False,2010,68,0.729,0.675,5,-5.292,0,0.0286,4.14e-05,0.00126,0.0867,0.74,125.036,"pop, Dance/Electronic" +INNA,Hot - Play & Win Radio Version,217036,False,2010,42,0.817,0.939,7,-6.079,0,0.28,0.0798,0.441,0.431,0.599,128.0,pop +Far East Movement,Like A G6,216893,False,2010,73,0.435,0.837,3,-8.126,1,0.449,0.00676,0.0,0.117,0.778,124.913,"hip hop, pop" +David Guetta,Gettin' Over You (feat. Fergie & LMFAO),188000,False,2010,62,0.615,0.913,10,-5.077,0,0.0816,0.178,0.0,0.0773,0.45,129.944,"hip hop, pop, Dance/Electronic" +Duck Sauce,Barbra Streisand - Radio Edit,196533,False,2010,46,0.769,0.922,1,-1.966,1,0.108,0.000939,0.197,0.233,0.506,127.965,Dance/Electronic +Lucenzo,Danza Kuduro (feat. 
Don Omar),213986,False,2011,0,0.622,0.942,7,-6.365,1,0.112,0.00713,0.0,0.0715,0.773,130.003,latin +Rihanna,Te Amo,208426,True,2009,67,0.567,0.707,8,-5.455,0,0.0818,0.541,0.000176,0.1,0.751,171.917,"hip hop, pop, R&B" +Timbaland,Carry Out (Featuring Justin Timberlake),232466,False,2009,69,0.531,0.574,10,-6.693,0,0.113,0.114,0.0308,0.256,0.272,115.68,"hip hop, pop, R&B" +DJ Fresh,Gold Dust - Radio Edit,192446,False,2010,63,0.451,0.948,0,-0.74,1,0.147,0.255,0.0,0.392,0.295,176.985,"pop, Dance/Electronic" +Edward Maya,Stereo Love - Radio Edit,184573,False,2010,66,0.799,0.783,1,-3.896,0,0.0322,0.0346,0.0186,0.0757,0.586,127.041,pop +Taio Cruz,Break Your Heart,201546,False,2010,74,0.607,0.934,3,-4.217,1,0.0314,0.0327,0.0,0.0909,0.568,122.01,"hip hop, pop" +Black Eyed Peas,Imma Be,257560,False,2009,61,0.597,0.517,0,-6.963,1,0.365,0.179,0.0,0.307,0.412,92.035,"hip hop, pop" +Sidney Samson,Riverside,320348,True,2009,47,0.804,0.976,1,-2.458,0,0.0445,0.00023,0.894,0.125,0.237,125.997,Dance/Electronic +Eminem,Not Afraid,248133,True,2010,79,0.855,0.954,0,-1.19,0,0.264,0.529,0.0,0.205,0.668,114.635,hip hop +Lady Gaga,Alejandro,274213,False,2009,67,0.623,0.793,11,-6.63,0,0.0462,0.000397,0.0015,0.375,0.36,98.998,pop +Rihanna,Rude Boy,222920,True,2009,74,0.563,0.75,11,-4.496,1,0.127,0.113,0.0,0.0788,0.812,173.906,"hip hop, pop, R&B" +Ne-Yo,Beautiful Monster,251573,False,2010,57,0.708,0.771,5,-6.99,0,0.131,0.209,5.32e-06,0.277,0.494,128.013,"pop, R&B" +3OH!3,Starstrukk,203093,False,2010,32,0.605,0.795,11,-6.086,0,0.0712,0.00153,0.0,0.207,0.263,139.896,"hip hop, pop, rock" +Black Eyed Peas,The Time (Dirty Bit),307640,False,2010,69,0.816,0.813,6,-7.798,0,0.0664,0.0662,2.77e-06,0.602,0.436,127.979,"hip hop, pop" +Kelly Rowland,Commander,218106,False,2011,59,0.395,0.876,11,-3.859,0,0.138,0.0173,8.46e-06,0.362,0.567,124.638,"hip hop, pop, R&B" +Swedish House Mafia,One (Your Name) - Radio Edit,163246,False,2010,63,0.733,0.673,9,-6.572,0,0.0333,0.0132,0.0049,0.16,0.636,125.061,"pop, 
Dance/Electronic" +Chris Brown,Deuces (feat. Tyga & Kevin McCall),276560,True,2011,68,0.692,0.736,1,-5.109,1,0.11,0.0324,0.0,0.0787,0.217,73.987,"hip hop, pop, R&B" +Usher,DJ Got Us Fallin' In Love (feat. Pitbull),220800,False,2010,79,0.663,0.861,7,-3.398,0,0.109,0.0338,0.0,0.082,0.654,119.963,"hip hop, pop, R&B" +Iyaz,Replay,182306,False,2009,75,0.706,0.751,9,-6.323,1,0.0708,0.173,0.0,0.168,0.195,91.031,"hip hop, pop" +Flo Rida,Club Can't Handle Me (feat. David Guetta),234560,False,2010,76,0.616,0.869,0,-3.911,1,0.0327,0.0283,0.0,0.064,0.473,127.966,"hip hop, pop" +Sean Kingston,Eenie Meenie,201946,False,2010,74,0.72,0.607,1,-4.168,1,0.0322,0.0543,0.0,0.113,0.828,121.223,"hip hop, pop, R&B" +Waka Flocka Flame,No Hands (feat. Roscoe Dash & Wale),263773,True,2010,58,0.76,0.595,1,-6.366,1,0.0391,0.00544,0.0,0.241,0.361,131.497,"hip hop, pop" +Tinie Tempah,Written in the Stars (feat. Eric Turner),207653,False,2010,57,0.656,0.931,7,-4.188,1,0.0981,0.0613,0.0,0.219,0.534,91.881,"hip hop, pop, Dance/Electronic" +Bruno Mars,Talking to the Moon,217866,False,2010,76,0.498,0.59,1,-4.721,0,0.032,0.511,0.0,0.107,0.0784,145.867,pop +Miley Cyrus,Can't Be Tamed,168213,False,2010,0,0.63,0.91,11,-2.919,0,0.144,0.0287,0.0,0.196,0.743,116.98,pop +Mike Posner,Cooler Than Me - Single Mix,213293,False,2010,75,0.768,0.82,7,-4.63,0,0.0474,0.179,0.0,0.689,0.625,129.965,"hip hop, pop, Dance/Electronic" +Lady Gaga,Telephone,220640,False,2009,69,0.824,0.836,3,-5.903,1,0.0404,0.00521,0.000817,0.112,0.716,122.014,pop +Yolanda Be Cool,We No Speak Americano (JT Radio Edit),157438,False,2010,1,0.901,0.805,6,-5.005,1,0.0464,0.0712,0.0812,0.0923,0.737,124.996,Dance/Electronic +Rihanna,Hard,250600,True,2009,55,0.31,0.746,1,-3.599,1,0.111,0.0121,0.0,0.649,0.163,182.032,"hip hop, pop, R&B" +Tinie Tempah,Pass Out,268053,True,2010,53,0.693,0.891,1,-3.261,1,0.296,0.0281,0.0,0.143,0.532,91.1,"hip hop, pop, Dance/Electronic" +Adam Lambert,Whataya Want from 
Me,227320,False,2009,57,0.438,0.672,11,-4.706,0,0.0429,0.0055,0.0,0.0583,0.457,185.934,pop +Trey Songz,Bottoms Up (feat. Nicki Minaj),242013,True,2010,0,0.845,0.601,1,-5.283,1,0.161,0.0205,0.0,0.385,0.329,74.008,"hip hop, pop, R&B" +Timbaland,If We Ever Meet Again (Featuring Katy Perry),292706,False,2009,68,0.652,0.605,8,-7.371,1,0.0393,0.00481,0.0,0.0605,0.394,126.091,"hip hop, pop, R&B" +Enrique Iglesias,I Like It,231373,False,2010,63,0.648,0.942,10,-2.881,0,0.0878,0.021,0.0,0.0594,0.73,129.007,"pop, latin" +Tim Berg,Seek Bromance - Avicii’s Vocal Edit,203960,False,2010,0,0.515,0.837,11,-2.552,1,0.0401,0.0868,0.0,0.0403,0.52,126.026,"pop, Dance/Electronic" +Usher,OMG (feat. will.i.am),269493,False,2010,73,0.781,0.745,4,-5.81,0,0.0332,0.198,1.14e-05,0.36,0.326,129.998,"hip hop, pop, R&B" +Maroon 5,Misery,216200,False,2010,67,0.703,0.81,4,-4.874,0,0.0424,0.000315,0.0,0.216,0.726,102.978,pop +Jay Sean,Down,212106,False,2009,3,0.657,0.695,2,-4.493,1,0.0321,0.0108,0.0,0.0822,0.683,65.997,"hip hop, pop, R&B" +Rihanna,Man Down,267000,True,2010,68,0.47,0.904,0,-4.024,0,0.177,0.0436,0.0,0.0491,0.557,155.788,"hip hop, pop, R&B" +3OH!3,My First Kiss (feat. Ke$ha),192440,False,2010,62,0.682,0.889,0,-4.166,1,0.0804,0.00564,0.0,0.36,0.827,138.021,"hip hop, pop, rock" +Justin Bieber,Baby,214240,False,2010,81,0.728,0.859,5,-5.237,0,0.137,0.0401,0.0,0.111,0.535,65.043,pop +Cali Swag District,Teach Me How to Dougie,237480,True,2011,66,0.846,0.438,11,-4.981,1,0.141,0.2,9.43e-05,0.0939,0.512,85.013,"hip hop, pop" +Swedish House Mafia,Miami 2 Ibiza - Swedish House Mafia vs. Tinie Tempah,206460,True,2010,54,0.736,0.929,7,-5.89,0,0.0674,0.00237,1.11e-05,0.402,0.658,125.03,"pop, Dance/Electronic" +K'NAAN,Wavin' Flag,220520,False,2009,57,0.625,0.699,0,-6.416,1,0.0729,0.13,0.0,0.238,0.717,75.974,set() +Ludacris,How Low,201586,True,2010,63,0.785,0.498,1,-6.977,1,0.0533,0.00248,1.23e-06,0.224,0.418,143.96,"hip hop, pop" +Kesha,Blah Blah Blah (feat. 
3OH!3),172053,True,2010,61,0.752,0.836,10,-3.173,1,0.115,0.0843,0.000425,0.424,0.519,120.003,"pop, Dance/Electronic" +DJ Khaled,"All I Do Is Win (feat. T-Pain, Ludacris, Snoop Dogg & Rick Ross)",232506,True,2010,51,0.544,0.781,10,-3.616,1,0.189,0.014,0.0,0.161,0.277,150.1,"hip hop, pop" +Jason Derulo,In My Head,199026,False,2010,65,0.762,0.748,0,-4.15,0,0.033,0.0266,0.0,0.348,0.851,110.009,"hip hop, pop" +Aloe Blacc,I Need A Dollar,243053,False,2010,1,0.84,0.482,6,-7.116,0,0.0333,0.202,0.0,0.0873,0.957,95.498,R&B +B.o.B,Airplanes (feat. Hayley Williams of Paramore),180480,True,2010,74,0.66,0.867,6,-4.285,0,0.116,0.11,0.0,0.0368,0.377,93.033,"hip hop, pop" +Ludacris,My Chick Bad,216933,True,2010,68,0.624,0.723,1,-6.782,1,0.378,0.172,0.0,0.483,0.757,84.789,"hip hop, pop" +Nelly,Just A Dream,237800,False,2010,73,0.531,0.752,1,-6.161,1,0.0305,0.0421,0.0,0.12,0.103,89.917,"hip hop, pop, R&B" +Rihanna,What's My Name?,263173,False,2010,66,0.692,0.786,2,-2.959,1,0.069,0.23,0.0,0.0797,0.583,100.025,"hip hop, pop, R&B" +Shontelle,Impossible,226533,False,2010,35,0.599,0.624,8,-3.631,1,0.0343,0.385,0.0,0.125,0.539,90.034,"pop, R&B" +Take That,The Flood,289359,False,2010,57,0.514,0.83,11,-6.077,0,0.044,0.0378,1.43e-06,0.122,0.138,100.631,pop +Trey Songz,Say Aah (feat. 
Fabolous),207546,False,2009,54,0.724,0.87,1,-3.614,0,0.113,0.00453,0.0,0.833,0.81,93.01,"hip hop, pop, R&B" +Jason Derulo,Whatcha Say,221253,False,2010,70,0.615,0.711,11,-5.507,1,0.0779,0.0444,0.0,0.145,0.711,144.036,"hip hop, pop" +Drake,Over,233560,True,2010,57,0.325,0.848,7,-5.611,1,0.279,0.0109,0.0,0.124,0.433,100.093,"hip hop, pop, R&B" +Example,Kickstarts,181826,False,2010,63,0.61,0.836,5,-4.455,1,0.0573,0.00374,0.0,0.358,0.657,126.056,"pop, Dance/Electronic" +Plan B,She Said,208853,False,2010,62,0.72,0.538,3,-5.85,0,0.141,0.301,3.03e-06,0.144,0.815,147.007,Dance/Electronic +The Saturdays,Higher,207613,False,2010,61,0.639,0.862,10,-3.292,1,0.0341,0.049,6.81e-06,0.264,0.596,117.011,"pop, Dance/Electronic" +Jay Sean,Do You Remember,210306,False,2009,2,0.855,0.668,11,-4.892,1,0.0644,0.0242,0.0,0.102,0.803,125.846,"hip hop, pop, R&B" +Usher,Hey Daddy (Daddy's Home),224093,False,2010,60,0.59,0.698,11,-4.262,1,0.0286,0.000176,0.0,0.107,0.352,95.975,"hip hop, pop, R&B" +Lil Wayne,Right Above It,271946,True,2010,68,0.376,0.841,3,-4.348,0,0.356,0.0435,0.0,0.578,0.463,76.052,"hip hop, pop" +The Wanted,All Time Low,205200,False,2010,18,0.689,0.615,8,-5.022,0,0.0563,0.352,0.0,0.0789,0.783,134.036,"pop, Dance/Electronic" +Kesha,Your Love Is My Drug,187133,False,2010,69,0.826,0.612,1,-3.891,1,0.0982,0.00681,0.0,0.0889,0.756,120.057,"pop, Dance/Electronic" +Ellie Goulding,Starry Eyed,176613,False,2010,54,0.504,0.814,5,-5.346,0,0.0389,0.135,1.42e-05,0.324,0.596,149.967,"pop, rock, Dance/Electronic" +Drake,Find Your Love,208946,False,2010,56,0.625,0.613,6,-6.005,0,0.173,0.0209,0.0,0.0286,0.738,96.033,"hip hop, pop, R&B" +Jason Derulo,Ridin' Solo,215746,False,2010,66,0.442,0.83,9,-4.02,1,0.146,0.128,0.0,0.129,0.578,89.338,"hip hop, pop" +Two Door Cinema Club,What You Know,189693,False,2010,0,0.55,0.753,6,-4.003,0,0.0407,0.000665,7.74e-06,0.0921,0.841,139.048,"World/Traditional, rock, pop" +JAY-Z,Young 
Forever,253906,True,2009,65,0.637,0.69,9,-3.214,0,0.0693,0.417,0.0,0.211,0.103,140.329,hip hop +Young Money,BedRock,288133,True,2009,71,0.733,0.664,8,-6.163,1,0.295,0.102,0.0,0.191,0.557,148.005,"hip hop, pop" +B.o.B,Nothin' on You (feat. Bruno Mars),268320,False,2010,74,0.688,0.853,10,-5.814,1,0.0493,0.386,0.0,0.0862,0.743,103.993,"hip hop, pop" +Katy Perry,Teenage Dream,227741,False,2010,69,0.719,0.798,10,-4.582,1,0.0361,0.0162,2.34e-06,0.134,0.591,120.011,pop +Scouting For Girls,This Ain't a Love Song,210680,False,2017,52,0.458,0.905,0,-4.157,1,0.0451,0.000431,0.0,0.378,0.553,176.667,"pop, rock" +Travie McCoy,Billionaire (feat. Bruno Mars),211160,True,2010,72,0.633,0.673,6,-6.403,0,0.258,0.297,0.0,0.206,0.659,86.776,"hip hop, pop" +Owl City,Fireflies,228346,False,2009,78,0.512,0.662,3,-6.797,1,0.0439,0.0275,0.0,0.118,0.472,180.114,"rock, pop" +Alicia Keys,Empire State of Mind (Part II) Broken Down,216480,False,2009,71,0.484,0.368,6,-7.784,1,0.0341,0.74,3.82e-05,0.118,0.142,92.923,"pop, R&B" +Lady A,Need You Now,236440,False,2010,73,0.581,0.717,4,-4.433,1,0.0318,0.0298,0.000186,0.243,0.316,107.884,"pop, country" +Taylor Swift,Back To December,293026,False,2010,65,0.529,0.67,2,-4.663,1,0.0303,0.117,0.0,0.334,0.286,141.893,pop +Neon Trees,Animal,212306,False,2010,64,0.482,0.829,5,-5.576,1,0.0437,0.00034,0.0,0.378,0.739,147.99,"rock, pop" +Kris Allen,Live Like We're Dying,212506,False,2009,51,0.589,0.893,0,-2.948,1,0.0397,0.0273,0.0,0.343,0.94,92.011,pop +Matt Cardle,When We Collide,226000,False,2011,46,0.443,0.683,2,-5.521,1,0.0343,0.0198,5.26e-06,0.313,0.447,81.986,pop +Robyn,Dancing On My Own,288670,False,2010,0,0.687,0.865,6,-4.663,1,0.0349,0.0743,0.225,0.0966,0.261,117.015,"pop, Dance/Electronic" +The Band Perry,If I Die Young,222773,False,2010,64,0.606,0.497,4,-6.611,1,0.0277,0.348,0.0,0.275,0.362,130.739,country +B.o.B,Magic (feat. 
Rivers Cuomo),196133,False,2010,65,0.549,0.932,8,-4.11,0,0.343,0.0127,0.0,0.347,0.787,82.439,"hip hop, pop" +Adele,Rolling in the Deep,228093,True,2011,2,0.73,0.77,8,-5.114,1,0.0298,0.138,0.0,0.0473,0.507,104.948,"pop, R&B" +Maroon 5,"Moves Like Jagger - Studio Recording From ""The Voice"" Performance",201493,False,2010,77,0.722,0.761,11,-4.459,0,0.0475,0.0117,0.0,0.315,0.624,128.044,pop +LMFAO,Party Rock Anthem,262146,False,2011,73,0.751,0.736,5,-4.168,0,0.156,0.0206,0.0,0.265,0.352,130.014,"hip hop, pop, Dance/Electronic" +Katy Perry,Firework,227893,False,2010,72,0.638,0.832,8,-5.039,1,0.049,0.141,0.0,0.113,0.648,124.071,pop +Bruno Mars,Grenade,222091,False,2010,78,0.704,0.558,2,-7.273,0,0.0542,0.148,0.0,0.107,0.245,110.444,pop +Pitbull,Hey Baby (Drop It to the Floor) (feat. T-Pain),234453,False,2011,71,0.595,0.912,10,-3.428,0,0.0884,0.0434,0.0,0.259,0.762,128.024,"hip hop, pop, latin" +Alexandra Stan,Mr. Saxobeat,195105,False,2011,0,0.726,0.931,11,-4.152,0,0.0468,0.0218,0.000283,0.143,0.797,126.976,"pop, Dance/Electronic" +David Guetta,Who's That Chick? (feat. 
Rihanna),201040,False,2010,71,0.675,0.602,11,-4.733,0,0.116,0.00377,0.0,0.0458,0.933,127.938,"hip hop, pop, Dance/Electronic" +Jennifer Lopez,On The Floor,284866,False,2011,79,0.73,0.777,3,-5.194,0,0.0496,0.105,0.000478,0.0691,0.575,130.0,"hip hop, pop, R&B" +Rihanna,S&M,243533,False,2010,73,0.767,0.682,1,-5.02,1,0.042,0.0113,0.00016,0.104,0.833,127.975,"hip hop, pop, R&B" +Kesha,Blow,219973,False,2010,69,0.753,0.729,11,-3.862,0,0.0392,0.00334,5.66e-05,0.073,0.812,120.013,"pop, Dance/Electronic" +Snoop Dogg,Sweat - Remix,195986,False,2011,68,0.813,0.732,7,-5.636,1,0.03,0.0597,0.00137,0.0826,0.731,130.02,"hip hop, pop" +Taio Cruz,Higher,187626,False,2010,61,0.672,0.907,8,-5.069,0,0.0721,0.0048,3.4e-05,0.156,0.746,128.027,"hip hop, pop" +Usher,More - RedOne Jimmy Joker Remix,219986,False,2010,67,0.551,0.893,7,-2.628,1,0.0543,0.00166,0.0,0.348,0.794,125.083,"hip hop, pop, R&B" +Example,Changed the Way You Kiss Me - Radio Edit,195466,False,2011,42,0.578,0.857,4,-3.78,0,0.041,0.00548,0.00162,0.0948,0.188,126.979,"pop, Dance/Electronic" +Black Eyed Peas,Just Can’t Get Enough,219426,False,2010,76,0.659,0.628,0,-8.685,0,0.179,0.186,0.0,0.105,0.262,94.05,"hip hop, pop" +David Guetta,Where Them Girls At (feat. Nicki Minaj & Flo Rida),194840,True,2012,72,0.666,0.876,3,-3.078,1,0.0414,0.055,0.0,0.259,0.552,129.884,"hip hop, pop, Dance/Electronic" +Lykke Li,I Follow Rivers - The Magician Remix,281106,False,2011,51,0.786,0.709,9,-5.737,0,0.0395,0.0192,0.00118,0.0845,0.285,122.019,"pop, Dance/Electronic" +Pitbull,"Give Me Everything (feat. 
Ne-Yo, Afrojack & Nayer)",252306,False,2011,81,0.671,0.939,8,-3.206,1,0.161,0.191,0.0,0.298,0.53,129.024,"hip hop, pop, latin" +Selena Gomez & The Scene,Love You Like A Love Song,188453,False,2011,79,0.858,0.678,1,-3.87,0,0.0469,0.0761,0.0,0.0741,0.922,117.009,"pop, Dance/Electronic" +Wiz Khalifa,Black and Yellow,217666,True,2011,75,0.684,0.834,2,-4.524,0,0.0675,0.0646,0.0,0.271,0.538,164.02,"hip hop, pop" +Katy Perry,E.T.,229573,False,2012,65,0.62,0.869,1,-5.252,1,0.175,0.0181,0.0,0.369,0.76,151.684,pop +Britney Spears,I Wanna Go,210266,False,2011,66,0.696,0.546,5,-6.55,1,0.0414,0.00379,3.8e-06,0.332,0.787,130.002,pop +Gym Class Heroes,Stereo Hearts (feat. Adam Levine),210960,False,2011,81,0.646,0.795,9,-3.293,1,0.0976,0.0319,0.0,0.267,0.796,89.99,"hip hop, pop" +Edward Maya,Stereo Love - Radio Edit,184573,False,2010,66,0.799,0.783,1,-3.896,0,0.0322,0.0346,0.0186,0.0757,0.586,127.041,pop +Enrique Iglesias,Tonight (I'm Fuckin' You),232213,True,2010,63,0.648,0.89,0,-3.982,0,0.0523,0.0294,3.58e-06,0.116,0.321,125.953,"pop, latin" +Jeremih,Down On Me,228453,False,2010,72,0.7,0.598,2,-7.783,1,0.114,0.0369,0.0,0.111,0.594,160.041,"hip hop, pop, R&B" +Sak Noel,Loca People - Radio Edit,215624,True,2011,49,0.926,0.808,11,-3.148,0,0.0599,0.000915,0.00327,0.0515,0.701,127.998,Dance/Electronic +Olly Murs,Heart Skips a Beat (feat. Rizzle Kicks),202266,False,2011,61,0.843,0.881,9,-3.951,1,0.0581,0.14,0.0,0.0765,0.876,110.621,"pop, Dance/Electronic" +LMFAO,Sexy And I Know It,199480,False,2011,67,0.707,0.861,7,-4.225,1,0.316,0.1,0.0,0.191,0.795,130.021,"hip hop, pop, Dance/Electronic" +Cobra Starship,You Make Me Feel... (feat. Sabi),215693,False,2011,70,0.668,0.857,7,-2.944,0,0.0535,0.0191,6.71e-06,0.0385,0.748,131.959,"pop, Dance/Electronic" +Jessie J,Do It Like A Dude,195240,True,2011,57,0.663,0.843,2,-3.672,1,0.049,0.0491,0.0,0.364,0.375,140.036,"hip hop, pop" +DJ Fresh,Louder (feat. 
Sian Evans) - Radio Edit,206776,False,2012,55,0.31,0.926,6,-1.131,0,0.0464,0.0184,0.00792,0.483,0.493,139.85,"pop, Dance/Electronic" +Waka Flocka Flame,No Hands (feat. Roscoe Dash & Wale),263773,True,2010,76,0.76,0.595,1,-6.366,1,0.0391,0.00544,0.0,0.241,0.361,131.497,"hip hop, pop" +Nickelback,When We Stand Together,190786,False,2011,65,0.446,0.9,10,-3.541,0,0.0489,0.000102,1.89e-05,0.0404,0.88,187.961,"rock, metal" +M83,Midnight City,243960,False,2011,76,0.507,0.729,11,-5.399,0,0.0393,0.0182,1.4e-06,0.0658,0.272,105.013,"rock, pop, metal, Dance/Electronic" +Britney Spears,Till the World Ends,237946,False,2011,65,0.693,0.705,8,-5.747,1,0.0665,0.0228,0.0,0.202,0.45,131.951,pop +Labrinth,Earthquake (feat. Tinie Tempah),274600,True,2012,62,0.54,0.856,0,-3.966,0,0.1,0.109,0.0,0.276,0.258,153.071,pop +Jason Derulo,Don't Wanna Go Home,206080,False,2011,63,0.671,0.808,2,-4.861,0,0.0652,0.02,0.0,0.134,0.637,121.956,"hip hop, pop" +Coldplay,Paradise,278719,False,2011,82,0.449,0.585,5,-6.761,1,0.0268,0.0509,8.75e-05,0.0833,0.212,139.631,"rock, pop" +Katy Perry,Last Friday Night (T.G.I.F.),230746,False,2012,74,0.649,0.815,3,-3.796,0,0.0415,0.00125,4.31e-05,0.671,0.765,126.03,pop +Adele,Set Fire to the Rain,242973,False,2011,2,0.603,0.67,2,-3.882,0,0.0249,0.00408,1.66e-06,0.112,0.445,107.995,"pop, R&B" +Dr. Dre,I Need A Doctor,283733,True,2011,71,0.594,0.946,3,-4.521,1,0.452,0.0869,0.0,0.306,0.397,155.826,hip hop +Lil Wayne,6 Foot 7 Foot,248586,True,2011,1,0.364,0.752,2,-5.429,1,0.304,0.0007,0.0,0.318,0.606,79.119,"hip hop, pop" +DJ Khaled,I'm On One,296146,True,2011,70,0.413,0.807,11,-3.499,0,0.318,0.0536,0.0,0.631,0.438,149.33,"hip hop, pop" +Chris Brown,Look At Me Now (feat. 
Lil' Wayne & Busta Rhymes),222586,True,2011,71,0.767,0.677,11,-6.128,0,0.184,0.0339,5.51e-06,0.144,0.538,146.155,"hip hop, pop, R&B" +Far East Movement,Rocketeer,211253,False,2010,61,0.664,0.845,4,-6.115,0,0.0461,0.181,0.0,0.267,0.357,96.005,"hip hop, pop" +The Wanted,Glad You Came,197935,False,2011,77,0.722,0.851,7,-3.873,0,0.0639,0.0319,0.0,0.108,0.452,126.885,"pop, Dance/Electronic" +Kesha,We R Who We R,204760,False,2010,71,0.736,0.817,8,-4.9,1,0.0407,0.00987,0.00167,0.117,0.653,119.95,"pop, Dance/Electronic" +Kelly Rowland,Motivation,230560,False,2011,65,0.744,0.672,9,-5.589,0,0.0418,0.187,0.000123,0.105,0.266,140.889,"hip hop, pop, R&B" +Rihanna,We Found Love,215226,False,2011,75,0.735,0.766,1,-4.485,1,0.0383,0.025,0.00138,0.108,0.6,127.985,"hip hop, pop, R&B" +Nicole Scherzinger,Don't Hold Your Breath,197440,False,2011,62,0.66,0.797,5,-6.096,0,0.0262,0.00646,0.000108,0.187,0.674,110.955,"pop, Dance/Electronic" +Mann,Buzzin Remix,224333,True,2011,48,0.687,0.939,10,-4.372,0,0.146,0.0887,0.0,0.283,0.539,104.029,"hip hop, pop" +Chris Brown,Yeah 3x,241666,True,2011,69,0.705,0.882,11,-3.201,0,0.0445,0.000369,1.16e-06,0.0934,0.7,130.0,"hip hop, pop, R&B" +Swedish House Mafia,Save The World,213337,False,2011,65,0.507,0.665,0,-7.598,1,0.0474,0.0144,0.0,0.0759,0.4,126.879,"pop, Dance/Electronic" +Kanye West,All Of The Lights,299613,True,2010,76,0.531,0.803,1,-3.284,1,0.0717,0.0796,1.71e-05,0.176,0.221,142.113,hip hop +Miguel,Sure Thing,195373,False,2010,78,0.684,0.607,11,-8.127,0,0.1,0.0267,0.000307,0.191,0.498,81.001,"pop, R&B" +Diddy - Dirty Money,Coming Home,238693,False,2010,69,0.392,0.839,7,-1.921,1,0.193,0.158,0.0,0.3,0.232,168.001,"hip hop, pop, R&B" +Rihanna,What's My Name?,263173,False,2010,66,0.692,0.786,2,-2.959,1,0.069,0.23,0.0,0.0797,0.583,100.025,"hip hop, pop, R&B" +Chris Brown,Beautiful People,225881,False,2019,53,0.415,0.775,5,-6.366,0,0.161,0.0658,0.00431,0.0843,0.536,127.898,"hip hop, pop, R&B" +Avril Lavigne,What the 
Hell,220706,False,2011,74,0.578,0.926,6,-3.689,0,0.0548,0.00472,0.0127,0.14,0.877,149.976,pop +Professor Green,Read All About It,235735,True,2011,39,0.656,0.752,11,-5.522,0,0.0434,0.336,0.0,0.223,0.445,100.963,Dance/Electronic +Chase & Status,Blind Faith,233666,False,2011,60,0.45,0.846,9,-4.712,0,0.0472,0.00523,0.0,0.228,0.402,140.042,Dance/Electronic +Chip,Champion (feat. Chris Brown),237293,True,2011,48,0.415,0.934,6,-2.914,0,0.24,0.078,0.0,0.145,0.52,190.151,"hip hop, Dance/Electronic" +Selena Gomez & The Scene,Who Says,195613,False,2011,76,0.682,0.927,4,-2.915,1,0.0479,0.0843,0.0,0.149,0.744,101.019,"pop, Dance/Electronic" +Rizzle Kicks,Down With The Trumpets,186851,False,2011,59,0.753,0.88,4,-4.689,0,0.0806,0.087,0.0,0.24,0.794,115.057,hip hop +Lady Gaga,Born This Way,260253,False,2011,72,0.587,0.828,11,-5.108,1,0.161,0.00327,0.0,0.331,0.494,123.907,pop +Nicki Minaj,Super Bass,200013,True,2010,0,0.72,0.861,11,-4.339,1,0.209,0.269,5.11e-06,0.601,0.669,126.991,"hip hop, pop" +Mac Miller,Donald Trump,165908,True,2011,61,0.636,0.901,1,-7.094,0,0.118,0.119,0.0,0.391,0.836,162.994,hip hop +Calvin Harris,Bounce (feat. Kelis) - Radio Edit,222186,False,2012,62,0.779,0.963,2,-2.125,0,0.0399,0.0334,0.493,0.664,0.759,127.941,"hip hop, pop, Dance/Electronic" +David Guetta,Without You (feat. 
Usher),208133,False,2012,70,0.608,0.614,2,-3.727,1,0.0285,0.227,4.06e-06,0.157,0.402,127.884,"hip hop, pop, Dance/Electronic" +Foster The People,Pumped Up Kicks,239600,False,2011,82,0.733,0.71,5,-5.849,0,0.0292,0.145,0.115,0.0956,0.965,127.975,"rock, pop" +Drake,Headlines,235986,True,2011,74,0.636,0.566,6,-7.16,0,0.106,0.365,0.000353,0.0917,0.425,151.894,"hip hop, pop, R&B" +Jessie J,Price Tag,223053,False,2011,73,0.636,0.831,5,-3.945,1,0.182,0.0294,3.85e-06,0.272,0.668,175.015,"hip hop, pop" +Jason Aldean,Dirt Road Anthem,229413,False,2010,53,0.678,0.739,2,-5.068,1,0.0352,0.32,2.36e-06,0.116,0.658,127.037,country +Wiz Khalifa,Roll Up,227773,True,2011,63,0.523,0.805,3,-5.473,1,0.192,0.0524,0.0,0.0914,0.602,125.358,"hip hop, pop" +OneRepublic,Good Life,253306,True,2009,72,0.634,0.69,6,-7.804,1,0.052,0.0771,0.0,0.132,0.645,94.988,pop +Bad Meets Evil,Lighters,303813,True,2011,68,0.676,0.695,0,-8.327,1,0.245,0.352,0.0,0.119,0.144,90.268,"hip hop, pop" +Bruno Mars,The Lazy Song,189109,False,2010,75,0.794,0.711,8,-5.124,0,0.0699,0.3,0.0,0.0955,0.955,174.915,pop +Lupe Fiasco,The Show Goes On,239613,True,2011,70,0.591,0.889,7,-3.839,1,0.115,0.0189,0.0,0.155,0.65,143.067,"hip hop, pop" +James Morrison,I Won't Let You Go,229303,False,2011,64,0.537,0.611,0,-6.427,1,0.0304,0.229,0.0,0.146,0.161,105.955,"pop, R&B" +Jason Derulo,It Girl,192200,False,2011,70,0.668,0.718,1,-4.736,0,0.0605,0.0165,0.0,0.104,0.345,91.993,"hip hop, pop" +Adele,Someone Like You,285240,False,2011,4,0.554,0.321,9,-8.251,1,0.028,0.893,0.0,0.0996,0.288,135.047,"pop, R&B" +Red Hot Chili Peppers,The Adventures of Rain Dance Maggie,282400,False,2011,65,0.683,0.734,4,-4.523,0,0.029,0.00112,0.0139,0.258,0.576,106.254,rock +Keri Hilson,Pretty Girl Rock,243920,False,2010,67,0.666,0.87,11,-5.004,0,0.246,0.203,0.0,0.0851,0.9,160.014,"pop, R&B" +P!nk,F**kin' Perfect,213413,True,2010,60,0.563,0.671,7,-4.788,1,0.0373,0.0422,0.0,0.36,0.45,91.964,pop +Blake Shelton,Honey 
Bee,210720,False,2011,50,0.481,0.849,4,-5.131,1,0.0385,0.00167,1.49e-06,0.121,0.723,205.57,country +Lady Gaga,The Edge Of Glory,320546,False,2011,67,0.583,0.768,9,-6.477,1,0.041,0.000323,0.0162,0.109,0.357,127.952,pop +Lady A,Just A Kiss,218840,False,2011,63,0.593,0.639,1,-5.826,1,0.0307,0.446,0.0,0.0998,0.332,142.881,"pop, country" +Hot Chelle Rae,Tonight Tonight,200466,False,2011,73,0.686,0.783,4,-4.977,1,0.119,0.0764,0.0,0.163,0.814,99.978,"hip hop, pop, rock" +Grouplove,Tongue Tied,218013,False,2011,79,0.56,0.936,3,-5.835,1,0.0439,0.00847,0.0,0.161,0.371,112.96,"pop, rock, Folk/Acoustic" +Coldplay,Every Teardrop Is a Waterfall,240796,False,2011,69,0.425,0.732,9,-6.883,1,0.0396,0.00194,0.0103,0.171,0.333,117.98,"rock, pop" +Beyoncé,Best Thing I Never Had,253746,False,2011,69,0.545,0.649,6,-4.062,1,0.0324,0.143,1.57e-05,0.0894,0.297,99.099,"pop, R&B" +P!nk,Raise Your Glass,202960,True,2010,76,0.7,0.709,7,-5.006,1,0.0838,0.0048,0.0,0.029,0.624,122.019,pop +Christina Perri,Jar of Hearts,246587,False,2011,72,0.349,0.348,3,-6.142,1,0.0316,0.726,0.0,0.12,0.0886,74.541,pop +Bruno Mars,Marry You,230192,False,2010,75,0.621,0.82,10,-4.865,1,0.0367,0.332,0.0,0.104,0.452,144.905,pop +The Band Perry,If I Die Young,222773,False,2010,64,0.606,0.497,4,-6.611,1,0.0277,0.348,0.0,0.275,0.362,130.739,country +Lil Wayne,How To Love,240306,False,2011,0,0.644,0.661,11,-6.093,1,0.0418,0.000177,5.23e-06,0.108,0.272,153.992,"hip hop, pop" +Charlene Soraia,Wherever You Will Go,197577,False,2011,60,0.597,0.115,9,-9.217,1,0.0334,0.82,0.000215,0.111,0.128,111.202,pop +Christina Perri,A Thousand Years,285120,False,2011,70,0.421,0.407,10,-7.445,1,0.0267,0.309,0.000961,0.11,0.161,139.028,pop +Rihanna,We Found Love,215226,False,2011,75,0.735,0.766,1,-4.485,1,0.0383,0.025,0.00138,0.108,0.6,127.985,"hip hop, pop, R&B" +Carly Rae Jepsen,Call Me Maybe,193400,False,2012,78,0.783,0.58,7,-6.548,1,0.0408,0.0114,2.28e-06,0.108,0.66,120.021,"pop, Dance/Electronic" +fun.,We Are Young (feat. 
Janelle Monáe),250626,False,2012,76,0.378,0.638,10,-5.576,1,0.075,0.02,7.66e-05,0.0849,0.735,184.086,"rock, pop" +Sean Paul,She Doesn't Mind,227786,False,2012,70,0.718,0.776,7,-5.208,0,0.0693,0.000624,5.3e-06,0.207,0.622,120.015,"hip hop, pop" +Pitbull,International Love (feat. Chris Brown),227280,False,2011,74,0.67,0.855,0,-3.035,0,0.0499,0.0124,0.0,0.335,0.648,120.05,"hip hop, pop, latin" +DEV,In The Dark,226226,False,2012,52,0.736,0.824,8,-4.231,1,0.0672,0.00358,0.000921,0.329,0.443,124.954,"pop, Dance/Electronic" +Adele,Skyfall,286480,False,2012,74,0.346,0.552,0,-6.864,0,0.0282,0.417,0.0,0.114,0.0789,75.881,"pop, R&B" +David Guetta,Turn Me On (feat. Nicki Minaj),199680,False,2012,65,0.704,0.793,8,-2.266,1,0.0591,0.0488,0.0,0.575,0.412,127.96,"hip hop, pop, Dance/Electronic" +Rihanna,Where Have You Been,242680,False,2011,71,0.719,0.847,0,-6.34,0,0.0916,0.00201,0.0204,0.223,0.444,127.963,"hip hop, pop, R&B" +JAY-Z,Ni**as In Paris,219333,True,2011,82,0.789,0.858,1,-5.542,1,0.311,0.127,0.0,0.349,0.775,140.022,hip hop +Maroon 5,One More Night,219546,False,2012,74,0.716,0.821,5,-3.435,0,0.0314,0.0558,0.0,0.0844,0.618,92.997,pop +Skrillex,Bangarang (feat. Sirah),215253,True,2011,70,0.716,0.972,7,-2.302,1,0.196,0.0145,3.22e-05,0.317,0.576,110.026,"hip hop, pop, Dance/Electronic" +The Wanted,Chasing The Sun,198800,False,2012,69,0.637,0.732,7,-6.209,0,0.0965,0.244,0.0,0.498,0.68,128.108,"pop, Dance/Electronic" +Flo Rida,Whistle,224653,False,2012,77,0.747,0.937,0,-5.746,1,0.0453,0.0208,0.0,0.29,0.739,103.976,"hip hop, pop" +will.i.am,This Is Love,279026,False,2013,62,0.535,0.796,0,-3.446,0,0.0445,0.00473,0.0,0.145,0.282,128.954,"hip hop, pop" +David Guetta,She Wolf (Falling to Pieces) [feat. 
Sia],222500,False,2012,66,0.492,0.857,7,-2.634,1,0.0655,0.0841,7.82e-06,0.344,0.393,129.973,"hip hop, pop, Dance/Electronic" +Nicki Minaj,Pound The Alarm,205640,True,2012,64,0.728,0.858,9,-3.686,1,0.0609,0.0403,4.09e-06,0.0241,0.591,125.055,"hip hop, pop" +Kendrick Lamar,Swimming Pools (Drank) - Extended Version,313786,True,2012,63,0.716,0.485,1,-7.745,1,0.404,0.123,2.69e-05,0.604,0.26,74.132,hip hop +MARINA,Primadonna,221075,False,2012,78,0.66,0.689,4,-2.671,0,0.0337,0.0884,0.0,0.0922,0.427,127.98,"pop, Dance/Electronic" +Kanye West,Mercy,329320,True,2012,73,0.563,0.496,6,-9.381,0,0.406,0.0685,5.8e-05,0.173,0.426,139.993,hip hop +DJ Fresh,Hot Right Now (feat. RITA ORA) - Radio Edit,182333,False,2012,58,0.524,0.972,4,-1.569,0,0.0431,0.00656,0.00058,0.224,0.476,175.017,"pop, Dance/Electronic" +LMFAO,Sexy And I Know It,199480,False,2011,67,0.707,0.861,7,-4.225,1,0.316,0.1,0.0,0.191,0.795,130.021,"hip hop, pop, Dance/Electronic" +PSY,Gangnam Style (강남스타일),219493,False,2012,72,0.727,0.937,11,-2.871,0,0.286,0.00417,0.0,0.091,0.749,132.067,pop +Kesha,Die Young,211920,False,2012,78,0.711,0.7,1,-4.805,0,0.046,0.00498,0.000125,0.215,0.801,128.001,"pop, Dance/Electronic" +Usher,Scream,234693,False,2012,66,0.616,0.862,7,-5.18,0,0.0973,0.00117,0.0,0.179,0.569,127.992,"hip hop, pop, R&B" +Olly Murs,Troublemaker (feat. 
Flo Rida),185586,False,2012,0,0.762,0.863,0,-3.689,0,0.0565,0.015,0.0,0.125,0.965,106.008,"pop, Dance/Electronic" +Linkin Park,BURN IT DOWN,230253,False,2012,73,0.585,0.972,9,-4.45,0,0.0534,0.0143,0.0,0.0707,0.585,110.006,"rock, metal" +Bruno Mars,Locked out of Heaven,233478,False,2012,85,0.726,0.698,5,-4.165,1,0.0431,0.049,0.0,0.309,0.867,143.994,pop +Avicii,Levels - Radio Edit,199906,False,2011,77,0.584,0.889,1,-5.941,0,0.0343,0.0462,0.828,0.309,0.464,126.04,"pop, Dance/Electronic" +Flo Rida,Good Feeling,248133,False,2012,76,0.706,0.89,1,-4.444,0,0.0688,0.0588,0.00286,0.306,0.684,128.011,"hip hop, pop" +Rihanna,Diamonds,225146,False,2012,75,0.564,0.71,11,-4.92,0,0.0461,0.00125,0.0,0.109,0.393,91.972,"hip hop, pop, R&B" +Wiz Khalifa,"Work Hard, Play Hard",219026,True,2012,62,0.787,0.666,11,-4.475,1,0.0485,0.0409,1.91e-05,0.296,0.485,140.008,"hip hop, pop" +Katy Perry,Part Of Me,216160,False,2012,73,0.678,0.918,5,-4.63,1,0.0355,0.000417,0.0,0.0744,0.769,130.028,pop +Nicki Minaj,Starships,210626,True,2012,75,0.747,0.716,11,-2.457,0,0.075,0.135,0.0,0.251,0.751,125.008,"hip hop, pop" +Kendrick Lamar,m.A.A.d city,350120,True,2012,67,0.487,0.729,2,-6.815,1,0.271,0.0538,4.07e-06,0.44,0.217,91.048,hip hop +Asaf Avidan & the Mojos,One Day / Reckoning Song (Wankelmut Remix) [Radio Edit],212360,False,2012,0,0.821,0.676,3,-6.366,0,0.0547,0.187,0.0001,0.0927,0.559,119.012,pop +Florence + The Machine,Spectrum (Say My Name) - Calvin Harris Remix,218190,False,2011,68,0.578,0.946,11,-3.85,0,0.0482,0.00225,0.00412,0.0966,0.588,126.092,"rock, pop, Dance/Electronic" +Azealia Banks,212,204956,True,2014,0,0.847,0.769,11,-5.761,0,0.258,0.0145,0.00012,0.0767,0.626,126.017,"hip hop, pop, R&B" +Maroon 5,Payphone,231173,True,2012,82,0.743,0.752,4,-4.813,1,0.0414,0.0188,0.0,0.287,0.545,110.015,pop +James Arthur,Impossible,209440,False,2013,75,0.376,0.695,0,-4.782,0,0.0933,0.135,0.0,0.11,0.302,169.533,pop +Michel Teló,Ai Se Eu Te Pego - 
Live,166866,False,2012,0,0.676,0.935,11,-4.55,1,0.0692,0.357,0.0,0.853,0.85,96.055,"country, latin" +Otto Knows,Million Voices - Radio Edit,192866,False,2012,0,0.582,0.894,8,-6.298,1,0.041,0.0022,0.0223,0.0664,0.0694,125.946,"pop, Dance/Electronic" +Icona Pop,I Love It,156773,True,2012,51,0.71,0.901,1,-2.686,1,0.0296,0.00828,1.34e-05,0.172,0.86,125.953,"pop, Dance/Electronic" +Tyga,Rack City,203200,True,2012,61,0.929,0.339,1,-10.881,1,0.371,0.0373,1.96e-05,0.187,0.273,98.986,"hip hop, pop" +Calvin Harris,Feel So Close - Radio Edit,206413,False,2012,79,0.707,0.924,7,-2.842,1,0.031,0.000972,0.00703,0.204,0.919,127.937,"hip hop, pop, Dance/Electronic" +Jennifer Lopez,Dance Again,237266,False,2012,0,0.797,0.867,11,-5.242,0,0.0696,0.0158,2.79e-06,0.0868,0.716,127.974,"hip hop, pop, R&B" +Grimes,Oblivion,251266,False,2012,0,0.697,0.529,2,-8.838,1,0.0338,0.115,0.901,0.0895,0.486,155.974,"pop, rock, Dance/Electronic" +Adele,Rumour Has It,223266,False,2011,0,0.612,0.748,0,-5.014,1,0.0445,0.617,0.0,0.167,0.574,120.052,"pop, R&B" +Coldplay,Paradise,278719,False,2011,82,0.449,0.585,5,-6.761,1,0.0268,0.0509,8.75e-05,0.0833,0.212,139.631,"rock, pop" +Chris Brown,Turn Up the Music,227973,False,2012,64,0.594,0.841,1,-5.792,1,0.102,0.000238,2.22e-06,0.156,0.643,129.925,"hip hop, pop, R&B" +Frank Ocean,Lost,234093,True,2012,64,0.913,0.603,8,-4.892,1,0.226,0.0272,0.000503,0.167,0.497,123.061,"hip hop, pop, R&B" +Calvin Harris,Let's Go (feat. Ne-Yo),232800,False,2012,66,0.71,0.882,4,-2.932,0,0.0595,0.00777,0.00771,0.294,0.875,128.016,"hip hop, pop, Dance/Electronic" +The Script,Hall of Fame (feat. 
will.i.am),202533,False,2012,78,0.421,0.873,10,-4.343,1,0.0564,0.0654,0.0,0.123,0.629,84.786,"pop, rock" +Swedish House Mafia,Don't You Worry Child - Radio Edit,212862,False,2012,76,0.612,0.84,11,-3.145,0,0.0509,0.112,0.0,0.116,0.438,129.042,"pop, Dance/Electronic" +MARINA,How to Be a Heartbreaker,221493,False,2012,50,0.69,0.897,11,-4.696,0,0.0506,0.0142,0.0,0.108,0.849,140.05,"pop, Dance/Electronic" +Justin Bieber,Boyfriend,171333,False,2012,63,0.717,0.55,10,-6.019,0,0.0519,0.0358,0.00198,0.126,0.331,96.979,pop +David Guetta,Titanium (feat. Sia),245040,False,2012,79,0.604,0.787,0,-3.674,0,0.103,0.0679,0.15,0.127,0.301,126.062,"hip hop, pop, Dance/Electronic" +Jessie J,Domino,231840,False,2011,74,0.758,0.557,7,-4.568,1,0.034,0.0117,0.0,0.0418,0.781,126.986,"hip hop, pop" +Taylor Swift,I Knew You Were Trouble.,219720,False,2012,59,0.622,0.469,3,-6.798,0,0.0363,0.00454,2.25e-06,0.0335,0.679,77.019,pop +ScHoolboy Q,Hands on the Wheel (feat. Asap Rocky),197131,True,2012,61,0.646,0.784,1,-7.471,0,0.108,0.0166,0.0,0.0721,0.179,127.839,"hip hop, pop" +P!nk,Try,247906,False,2012,76,0.674,0.628,2,-7.079,1,0.03,0.00144,0.0,0.0944,0.552,103.998,pop +Robbie Williams,Candy,201053,False,2012,68,0.715,0.791,10,-6.63,1,0.0414,0.0368,0.0,0.0694,0.879,116.043,"pop, rock" +Snoop Dogg,"Young, Wild & Free (feat. Bruno Mars)",207333,True,2011,77,0.715,0.655,0,-6.425,1,0.137,0.0525,0.0,0.115,0.531,95.078,"hip hop, pop" +Justin Bieber,As Long As You Love Me,229466,False,2012,67,0.571,0.873,0,-3.382,0,0.1,0.0811,0.0,0.361,0.613,139.691,pop +Rita Ora,R.I.P. (feat. 
Tinie Tempah),228026,False,2012,46,0.603,0.831,11,-3.443,0,0.0479,0.027,0.0,0.652,0.358,72.022,"hip hop, pop, Dance/Electronic" +Alex Clare,Too Close,256613,False,2011,60,0.588,0.694,11,-4.278,0,0.0387,0.00948,0.0,0.113,0.271,126.027,rock +Tyga,Faded,206666,True,2012,58,0.831,0.667,1,-8.438,1,0.188,0.0103,0.0,0.35,0.339,95.073,"hip hop, pop" +Alesso,Years - ラジオ・エディット,195480,False,2012,0,0.37,0.823,2,-6.245,1,0.0805,0.00234,1.91e-06,0.174,0.323,128.008,"pop, Dance/Electronic" +Guy Sebastian,Battle Scars (feat. Lupe Fiasco),250080,False,2012,0,0.61,0.863,5,-2.632,0,0.206,0.186,0.0,0.097,0.508,83.993,pop +Katy Perry,The One That Got Away,227333,False,2012,72,0.687,0.792,1,-4.023,0,0.0353,0.000802,0.0,0.2,0.864,133.962,pop +Avicii,Silhouettes - Original Radio Edit,211880,False,2012,53,0.605,0.8,5,-6.235,0,0.0545,0.155,0.0562,0.121,0.836,128.074,"pop, Dance/Electronic" +Gotye,Somebody That I Used To Know,244884,False,2011,57,0.863,0.527,0,-6.896,1,0.0358,0.607,0.000168,0.107,0.674,129.054,set() +Flo Rida,Wild Ones (feat. Sia),232946,False,2012,77,0.608,0.86,5,-5.324,0,0.0554,0.0991,0.0,0.262,0.437,127.075,"hip hop, pop" +One Direction,What Makes You Beautiful,199986,False,2012,82,0.726,0.787,4,-2.494,1,0.0737,0.009,0.0,0.0596,0.888,124.99,pop +Lana Del Rey,Born To Die,286253,False,2012,53,0.18,0.636,4,-6.591,0,0.0439,0.209,0.000133,0.217,0.39,75.223,pop +Kesha,C'Mon,214333,False,2012,0,0.558,0.781,1,-5.448,0,0.0963,0.00657,0.00144,0.102,0.286,126.025,"pop, Dance/Electronic" +Owl City,Good Time,205933,False,2012,77,0.56,0.872,3,-4.269,1,0.14,0.0239,6.98e-06,0.371,0.682,126.05,"rock, pop" +Calvin Harris,We'll Be Coming Back (feat. Example),234360,False,2012,62,0.596,0.952,7,-4.364,1,0.0873,0.00131,0.0,0.598,0.571,127.945,"hip hop, pop, Dance/Electronic" +Emeli Sandé,"Read All About It, Pt. 
III",283706,False,2012,65,0.63,0.44,2,-7.186,1,0.0249,0.822,3.87e-06,0.0926,0.343,98.082,"pop, R&B" +Ellie Goulding,Lights - Single Version,210853,False,2010,63,0.682,0.795,8,-6.17,0,0.0367,0.0297,0.0391,0.131,0.78,120.008,"pop, rock, Dance/Electronic" +Train,Drive By,195973,False,2012,77,0.765,0.837,1,-3.113,0,0.032,0.00107,1.06e-05,0.0801,0.721,122.028,pop +Disclosure,Latch,255631,False,2013,74,0.503,0.727,1,-5.456,1,0.167,0.0159,9.45e-05,0.0895,0.521,121.985,"pop, Dance/Electronic" +Coldplay,Princess of China,239215,False,2011,66,0.42,0.69,9,-6.221,0,0.0347,0.00385,0.015,0.287,0.237,85.014,"rock, pop" +One Direction,Live While We're Young,200186,False,2012,0,0.658,0.837,2,-2.063,1,0.0543,0.0629,0.0,0.0969,0.936,126.015,pop +Gym Class Heroes,Ass Back Home (feat. Neon Hitch),222213,True,2011,61,0.716,0.838,10,-4.289,1,0.0513,0.134,0.0,0.148,0.646,130.034,"hip hop, pop" +Carrie Underwood,Blown Away,240133,False,2012,63,0.531,0.843,9,-2.569,0,0.0429,0.0909,0.0,0.0283,0.392,136.991,"pop, country" +alt-J,Breezeblocks,227080,False,2012,71,0.616,0.656,5,-7.298,1,0.0344,0.096,0.000879,0.205,0.286,150.071,rock +Chris Brown,Don't Wake Me Up,222306,False,2012,62,0.602,0.691,7,-5.197,0,0.051,0.0548,0.0,0.144,0.206,127.967,"hip hop, pop, R&B" +Kelly Clarkson,Stronger (What Doesn't Kill You),221946,False,2011,74,0.562,0.939,0,-4.282,1,0.0475,0.046,0.0,0.112,0.684,116.044,"pop, R&B" +Drake,The Motto,181573,True,2011,74,0.766,0.442,1,-8.558,1,0.356,0.000107,6.12e-05,0.111,0.39,201.8,"hip hop, pop, R&B" +Katy Perry,Wide Awake,220946,False,2012,65,0.514,0.683,5,-5.099,1,0.0367,0.0749,2.64e-06,0.392,0.575,159.814,pop +Rita Ora,How We Do (Party),247026,True,2012,48,0.738,0.922,7,-3.94,1,0.0833,0.0034,0.0,0.0986,0.689,116.024,"hip hop, pop, Dance/Electronic" +Rudimental,Feel the Love (feat. 
John Newman),245186,False,2013,64,0.389,0.706,1,-6.849,1,0.0593,0.0026,0.000182,0.686,0.238,179.911,"pop, Dance/Electronic" +Lana Del Rey,Ride,289080,True,2012,66,0.373,0.686,0,-5.52,1,0.034,0.128,1.96e-06,0.383,0.189,93.763,pop +Ne-Yo,Let Me Love You (Until You Learn To Love Yourself),251626,False,2012,69,0.658,0.677,5,-6.628,1,0.0393,0.248,0.0,0.368,0.248,124.91,"pop, R&B" +Demi Lovato,Give Your Heart a Break,205346,False,2011,71,0.651,0.695,6,-3.218,1,0.0487,0.23,0.0,0.144,0.569,123.008,pop +Drake,Take Care,277386,True,2011,75,0.629,0.515,0,-10.358,0,0.265,0.0267,1.22e-05,0.0888,0.299,121.845,"hip hop, pop, R&B" +Little Mix,Wings,219733,False,2012,0,0.738,0.875,7,-3.141,1,0.127,0.000673,0.000556,0.285,0.538,114.962,pop +Bruno Mars,It Will Rain,257720,False,2011,74,0.576,0.835,2,-6.826,1,0.0486,0.337,0.0,0.082,0.476,150.017,pop +Cher Lloyd,Want U Back,214013,False,2011,67,0.696,0.893,9,-2.963,1,0.076,0.0662,0.0,0.473,0.573,97.954,"pop, Dance/Electronic" +Lorde,Royals,190185,False,2013,77,0.674,0.428,7,-9.504,1,0.122,0.121,0.0,0.132,0.337,84.878,"pop, Dance/Electronic" +Robin Thicke,Blurred Lines,263053,False,2013,55,0.861,0.504,7,-7.707,1,0.0489,0.00412,1.78e-05,0.0783,0.881,120.0,"pop, R&B" +Imagine Dragons,Radioactive,186813,False,2012,77,0.448,0.784,9,-3.686,1,0.0627,0.106,0.000108,0.668,0.236,136.245,rock +Avicii,Wake Me Up,247426,False,2013,6,0.532,0.783,2,-5.697,1,0.0523,0.0038,0.0012,0.161,0.643,124.08,"pop, Dance/Electronic" +Bruno Mars,Locked out of Heaven,233478,False,2012,85,0.726,0.698,5,-4.165,1,0.0431,0.049,0.0,0.309,0.867,143.994,pop +Macklemore & Ryan Lewis,Thrift Shop (feat. Wanz),235613,True,2012,1,0.781,0.526,6,-6.985,0,0.293,0.0619,0.0,0.0457,0.662,94.992,"hip hop, pop" +Daft Punk,Get Lucky (feat. 
Pharrell Williams & Nile Rodgers) - Radio Edit,248413,False,2013,83,0.794,0.811,6,-8.966,0,0.038,0.0426,1.07e-06,0.101,0.862,116.047,"hip hop, Dance/Electronic" +will.i.am,Scream & Shout,283400,True,2013,78,0.772,0.685,5,-6.849,1,0.0696,0.019,8.96e-05,0.131,0.501,130.033,"hip hop, pop" +Stromae,Papaoutai,232146,False,2013,0,0.733,0.818,10,-7.222,0,0.0859,0.0241,0.0,0.0636,0.253,116.019,pop +Bingo Players,Get Up (Rattle) - Vocal Edit,166933,False,2013,1,0.801,0.985,7,-2.69,1,0.0645,0.0205,6.86e-06,0.296,0.722,127.99,"pop, Dance/Electronic" +Naughty Boy,La La La,220779,False,2013,58,0.754,0.677,6,-4.399,0,0.0316,0.112,0.0,0.111,0.254,124.988,Dance/Electronic +Tom Odell,Another Love,244360,True,2013,88,0.445,0.537,4,-8.532,0,0.04,0.695,1.65e-05,0.0944,0.131,122.769,pop +2 Chainz,We Own It (Fast & Furious),227906,False,2013,69,0.563,0.902,1,-4.586,0,0.402,0.0545,0.0,0.0524,0.559,171.999,"hip hop, pop" +David Guetta,Play Hard (feat. Ne-Yo & Akon),201000,False,2012,69,0.691,0.921,8,-1.702,0,0.0533,0.173,0.0,0.331,0.8,130.072,"hip hop, pop, Dance/Electronic" +Pitbull,Feel This Moment (feat. Christina Aguilera),229506,False,2012,77,0.673,0.758,7,-3.632,1,0.158,0.039,0.0,0.341,0.542,135.956,"hip hop, pop, latin" +will.i.am,#thatPOWER,279506,False,2013,68,0.797,0.608,6,-6.096,0,0.0584,0.00112,7.66e-05,0.0748,0.402,127.999,"hip hop, pop" +Lil Wayne,Love Me,255053,True,2013,68,0.669,0.634,11,-6.476,1,0.0327,0.0125,0.0,0.0946,0.496,124.906,"hip hop, pop" +Macklemore & Ryan Lewis,Can't Hold Us (feat. Ray Dalton),258342,False,2012,81,0.641,0.922,2,-4.457,1,0.0786,0.0291,0.0,0.0862,0.847,146.078,"hip hop, pop" +Martin Garrix,Animals - Radio Edit,176117,False,2013,0,0.593,0.914,1,-5.351,1,0.0363,0.00137,0.445,0.0714,0.0381,128.015,"pop, Dance/Electronic" +Jason Derulo,Talk Dirty (feat. 
2 Chainz),177685,True,2013,56,0.76,0.652,6,-7.321,1,0.232,0.0348,0.0,0.307,0.759,100.315,"hip hop, pop" +John Newman,Love Me Again,239894,False,2013,74,0.495,0.894,2,-4.814,0,0.0441,0.00453,0.000596,0.103,0.213,126.03,set() +Britney Spears,Work B**ch,247853,True,2013,0,0.63,0.816,9,-6.535,1,0.15,0.124,0.00111,0.0616,0.85,128.012,pop +The Neighbourhood,Sweater Weather,240400,False,2013,89,0.612,0.807,10,-2.81,1,0.0336,0.0495,0.0177,0.101,0.398,124.053,"rock, pop" +Sebastian Ingrosso,Reload - Radio Edit,221272,False,2013,68,0.485,0.724,9,-4.633,0,0.0521,0.0736,0.0,0.0631,0.433,128.045,"pop, Dance/Electronic" +Capital Cities,Safe And Sound,192693,False,2013,0,0.652,0.783,0,-4.829,1,0.0309,0.000189,0.0173,0.0889,0.836,117.952,pop +Demi Lovato,Heart Attack,210840,False,2013,79,0.504,0.785,8,-4.802,1,0.104,0.0738,0.0,0.239,0.502,173.968,pop +Arctic Monkeys,Why'd You Only Call Me When You're High?,161123,False,2013,84,0.691,0.631,2,-6.478,1,0.0368,0.0483,1.13e-05,0.104,0.8,92.004,rock +Justin Bieber,Beauty And A Beat,227986,False,2012,73,0.602,0.843,0,-4.831,1,0.0593,0.000688,5.27e-05,0.0682,0.526,128.003,pop +Eminem,The Monster,250188,True,2013,75,0.781,0.853,1,-3.68,0,0.0715,0.0525,0.0,0.12,0.624,110.049,hip hop +Pitbull,Don't Stop the Party (feat. TJR),206120,False,2012,64,0.722,0.958,4,-3.617,1,0.0912,0.00726,0.0,0.375,0.952,127.008,"hip hop, pop, latin" +Selena Gomez,Come & Get It,231986,False,2013,69,0.546,0.787,7,-4.1,0,0.0517,0.0101,0.00021,0.0809,0.573,79.979,pop +P!nk,True Love (feat. 
Lily Allen),230733,True,2012,71,0.457,0.823,9,-4.76,1,0.311,0.00144,0.0,0.108,0.578,192.205,pop +Disclosure,White Noise,277687,False,2013,54,0.665,0.844,10,-6.164,0,0.0501,0.00963,0.000215,0.311,0.902,119.978,"pop, Dance/Electronic" +Flo Rida,I Cry,223800,False,2012,60,0.693,0.822,4,-5.441,0,0.0439,0.00616,1.79e-06,0.315,0.763,126.035,"hip hop, pop" +Kanye West,Black Skinhead,188013,True,2013,0,0.578,0.825,1,-6.107,1,0.322,0.000986,0.0,0.176,0.283,130.089,hip hop +Lana Del Rey,Summertime Sadness (Lana Del Rey Vs. Cedric Gervais) - Cedric Gervais Remix,214912,False,2013,72,0.572,0.81,1,-5.791,0,0.0558,0.0157,6.53e-06,0.13,0.11,126.052,pop +Eminem,Berzerk,238746,True,2013,66,0.739,0.872,11,-4.059,0,0.333,0.0217,0.0,0.26,0.684,95.084,hip hop +Icona Pop,I Love It (feat. Charli XCX),157152,True,2013,68,0.711,0.906,8,-2.671,1,0.0284,0.00952,1.64e-05,0.153,0.824,125.916,"pop, Dance/Electronic" +Fall Out Boy,My Songs Know What You Did In The Dark (Light Em Up),186826,False,2013,71,0.558,0.924,9,-4.341,0,0.064,0.0271,3.32e-06,0.537,0.567,151.99,rock +Lorde,Team,193058,False,2013,0,0.691,0.582,6,-7.444,1,0.0939,0.162,0.0,0.247,0.427,100.05,"pop, Dance/Electronic" +Avicii,I Could Be The One (Avicii Vs. Nicky Romero) - Radio Edit,208316,False,2012,68,0.509,0.79,6,-3.782,0,0.0374,0.332,6.67e-05,0.316,0.638,127.946,"pop, Dance/Electronic" +Imagine Dragons,Demons,177506,False,2012,81,0.505,0.71,3,-3.015,1,0.0321,0.19,0.00025,0.269,0.428,89.938,rock +One Direction,One Way or Another (Teenage Kicks),157293,False,2013,66,0.489,0.867,4,-3.121,0,0.071,0.025,0.0,0.586,0.409,162.131,pop +Taylor Swift,I Knew You Were Trouble.,219720,False,2012,76,0.622,0.469,3,-6.798,0,0.0363,0.00454,2.25e-06,0.0335,0.679,77.019,pop +Arctic Monkeys,Do I Wanna Know?,272394,False,2013,84,0.548,0.532,5,-7.596,1,0.0323,0.186,0.000263,0.217,0.405,85.03,rock +Rudimental,Waiting All Night (feat. 
Ella Eyre),292586,False,2013,66,0.544,0.728,6,-5.358,0,0.05,0.00264,0.00959,0.427,0.281,174.983,"pop, Dance/Electronic" +Klangkarussell,Sonnentanz - Sun Don't Shine,238120,False,2014,68,0.579,0.549,5,-8.262,0,0.0909,0.104,0.0202,0.0964,0.155,119.74,"pop, Dance/Electronic" +Calvin Harris,Drinking from the Bottle (feat. Tinie Tempah),240346,False,2012,61,0.665,0.886,9,-4.175,0,0.0514,0.0469,6.24e-05,0.0525,0.53,128.062,"hip hop, pop, Dance/Electronic" +A$AP Rocky,"F**kin' Problems (feat. Drake, 2 Chainz & Kendrick Lamar)",233786,True,2013,76,0.853,0.693,1,-6.87,1,0.275,0.0239,0.0,0.11,0.662,95.967,hip hop +P!nk,Try,247906,False,2012,76,0.674,0.628,2,-7.079,1,0.03,0.00144,0.0,0.0944,0.552,103.998,pop +Ylvis,The Fox (What Does the Fox Say?),213708,False,2013,63,0.703,0.867,6,-4.292,1,0.0453,0.107,0.0,0.119,0.546,128.008,hip hop +Ellie Goulding,Burn,231211,False,2014,70,0.559,0.777,1,-5.031,1,0.0432,0.31,0.0,0.105,0.329,87.016,"pop, rock, Dance/Electronic" +Baauer,Harlem Shake,196664,False,2013,57,0.452,0.794,0,-5.151,1,0.0483,0.0111,0.00182,0.416,0.282,137.825,Dance/Electronic +Calvin Harris,I Need Your Love (feat. Ellie Goulding),234506,False,2012,70,0.695,0.869,8,-5.066,1,0.0483,0.41,0.0,0.237,0.58,124.989,"hip hop, pop, Dance/Electronic" +Katy Perry,This Is How We Do,204285,False,2013,60,0.69,0.636,9,-6.028,0,0.0457,0.0203,0.0,0.147,0.8,96.0,pop +AWOLNATION,Sail,259102,False,2011,2,0.825,0.435,1,-9.582,1,0.0568,0.452,0.609,0.0953,0.243,119.038,"pop, rock" +Duke Dumont,Need U (100%) [feat. A*M*E] - Radio Edit,174853,False,2013,27,0.681,0.835,0,-5.705,0,0.049,0.00176,0.00183,0.354,0.469,124.047,"pop, Dance/Electronic" +Jason Derulo,The Other Side,226986,False,2013,50,0.561,0.836,9,-3.939,1,0.1,0.0525,0.0,0.136,0.517,127.923,"hip hop, pop" +Storm Queen,Look Right Through - MK Vocal Edit,150400,False,2014,0,0.832,0.815,0,-8.035,0,0.081,0.00304,0.00553,0.263,0.519,119.995,Dance/Electronic +Calvin Harris,Sweet Nothing (feat. 
Florence Welch),212560,False,2012,71,0.573,0.929,8,-3.942,0,0.109,0.197,0.000112,0.0567,0.582,127.934,"hip hop, pop, Dance/Electronic" +Katy Perry,Unconditionally,228878,False,2013,0,0.555,0.729,7,-4.813,1,0.0387,0.00357,0.0,0.209,0.369,129.003,pop +Wale,Bad (feat. Rihanna) - Remix,238826,True,2013,61,0.807,0.702,6,-6.594,0,0.0748,0.307,0.0,0.111,0.447,114.038,"hip hop, pop, R&B" +Ciara,Body Party,234040,False,2013,64,0.577,0.576,2,-6.299,1,0.0563,0.0143,0.0,0.602,0.12,133.973,"pop, R&B" +Drake,Started From the Bottom,174120,True,2013,65,0.794,0.523,8,-7.829,1,0.161,0.0331,0.0,0.156,0.56,86.307,"hip hop, pop, R&B" +Birdy,Wings,252106,False,2013,48,0.474,0.707,7,-4.534,1,0.0253,0.0831,0.0,0.162,0.224,83.235,pop +Bastille,Of The Night,214205,False,2013,61,0.67,0.829,5,-7.2,0,0.0427,0.0192,0.000225,0.089,0.349,125.01,"pop, rock" +Olly Murs,Dear Darlin',206373,False,2012,59,0.512,0.828,11,-4.672,0,0.0454,0.00627,8.73e-06,0.119,0.34,124.021,"pop, Dance/Electronic" +Katy Perry,Roar,223546,False,2013,73,0.554,0.772,7,-4.821,0,0.0418,0.00487,6.6e-06,0.354,0.455,179.984,pop +Blake Shelton,Sure Be Cool If You Did,215720,False,2013,56,0.576,0.726,11,-4.625,1,0.035,0.255,0.0,0.108,0.582,136.802,country +Calvin Harris,Thinking About You (feat. 
Ayah Marar),247933,False,2012,65,0.725,0.874,0,-3.715,0,0.0396,0.00262,0.000412,0.0958,0.748,127.985,"hip hop, pop, Dance/Electronic" +JAY-Z,Holy Grail,338413,True,2013,62,0.676,0.534,2,-6.901,0,0.0831,0.0594,8.59e-06,0.256,0.156,145.082,hip hop +Bridgit Mendler,Ready or Not,200946,False,2012,72,0.715,0.872,2,-3.835,1,0.0509,0.00351,3.84e-05,0.107,0.748,93.043,pop +The Saturdays,What About Us,220682,False,2013,61,0.704,0.68,7,-4.751,1,0.0581,0.0059,0.0,0.213,0.689,123.973,"pop, Dance/Electronic" +Bastille,Pompeii,214147,False,2013,74,0.679,0.715,9,-6.383,1,0.0407,0.0755,0.0,0.271,0.571,127.435,"pop, rock" +One Direction,Best Song Ever,200106,False,2013,76,0.652,0.877,1,-2.986,1,0.0465,0.0227,0.0,0.0789,0.486,118.491,pop +Nelly,Hey Porsche,209466,True,2013,57,0.726,0.795,9,-4.653,1,0.029,0.136,0.0,0.698,0.952,115.995,"hip hop, pop, R&B" +Zedd,Clarity,271426,False,2012,0,0.523,0.78,8,-3.464,1,0.0753,0.0366,0.0,0.0751,0.193,128.006,"hip hop, pop, Dance/Electronic" +Miley Cyrus,Wrecking Ball,221360,False,2013,77,0.53,0.422,5,-6.262,1,0.0342,0.407,0.0,0.107,0.349,119.964,pop +Chris Brown,Fine China,213666,False,2013,0,0.659,0.735,10,-4.758,0,0.056,0.0466,0.0,0.111,0.609,104.038,"hip hop, pop, R&B" +Daft Punk,Lose Yourself to Dance (feat. 
Pharrell Williams),353893,False,2013,69,0.832,0.659,10,-7.828,0,0.057,0.0839,0.00114,0.0753,0.674,100.163,"hip hop, Dance/Electronic" +Armin van Buuren,This Is What It Feels Like,204360,False,2013,66,0.551,0.833,8,-5.217,1,0.03,0.0391,2.77e-06,0.0632,0.145,129.885,"pop, Dance/Electronic" +Justin Timberlake,Mirrors,484146,False,2013,78,0.574,0.512,5,-6.664,0,0.0503,0.234,0.0,0.0946,0.512,76.899,pop +Bruno Mars,Treasure,178560,True,2012,78,0.874,0.692,5,-5.28,0,0.0431,0.0412,7.24e-05,0.324,0.937,116.017,pop +Of Monsters and Men,Little Talks,266600,False,2012,79,0.457,0.757,1,-5.177,1,0.032,0.0206,0.0,0.146,0.417,102.961,"pop, rock, Folk/Acoustic" +Miley Cyrus,We Can't Stop,231240,False,2013,78,0.613,0.622,1,-5.794,0,0.0334,0.00882,0.0,0.37,0.484,80.003,pop +J. Cole,Power Trip (feat. Miguel),241160,True,2013,0,0.667,0.61,1,-7.054,1,0.217,0.322,0.000203,0.426,0.465,99.991,hip hop +Paramore,Still into You,216013,False,2013,79,0.602,0.923,5,-3.763,1,0.044,0.0098,0.0,0.0561,0.765,136.01,"pop, rock" +Christina Perri,human,250706,False,2013,59,0.439,0.489,8,-6.286,1,0.0368,0.132,0.000643,0.114,0.253,143.808,pop +Drake,"Hold On, We're Going Home",227880,False,2013,72,0.773,0.414,6,-7.436,0,0.0961,0.00411,3.4e-05,0.0733,0.289,99.993,"hip hop, pop, R&B" +James Blunt,Bonfire Heart,238000,False,2013,0,0.575,0.821,0,-5.7,1,0.0527,0.181,0.0,0.124,0.449,118.021,pop +Maroon 5,Daylight,225066,False,2012,66,0.656,0.674,7,-5.473,1,0.0269,0.00242,0.0,0.255,0.369,120.001,pop +Childish Gambino,3005,235662,True,2013,0,0.681,0.463,6,-6.542,0,0.289,0.128,0.0,0.0769,0.661,82.92,hip hop +Avril Lavigne,Here's to Never Growing Up,214080,True,2013,0,0.483,0.871,0,-3.084,1,0.101,0.015,0.0,0.415,0.716,164.986,pop +Taylor Swift,22,232120,False,2012,68,0.661,0.729,7,-6.561,1,0.0376,0.00215,0.0013,0.0477,0.668,103.987,pop +Passenger,Let Her Go,252866,False,2012,73,0.509,0.538,7,-7.335,1,0.0572,0.385,0.0,0.104,0.244,75.089,pop +Ariana Grande,The 
Way,227026,False,2013,65,0.645,0.878,5,-3.208,0,0.113,0.294,0.0,0.076,0.862,82.324,pop +Rihanna,Stay,240706,False,2012,76,0.621,0.31,9,-10.164,0,0.0283,0.945,6.12e-05,0.117,0.125,111.893,"hip hop, pop, R&B" +Justin Timberlake,Suit & Tie (feat. Jay-Z),326280,True,2013,67,0.795,0.596,11,-3.799,0,0.219,0.103,0.0,0.164,0.357,101.985,pop +Bruno Mars,When I Was Your Man,213826,False,2012,83,0.612,0.28,0,-8.648,1,0.0434,0.932,0.0,0.088,0.387,72.795,pop +Florida Georgia Line,Cruise,208960,False,2012,57,0.457,0.948,10,-3.364,1,0.0354,0.0191,0.0,0.0536,0.878,148.0,country +Pharrell Williams,"Happy - From ""Despicable Me 2""",232720,False,2014,79,0.647,0.822,5,-4.662,0,0.183,0.219,0.0,0.0908,0.962,160.019,"hip hop, pop" +Katy Perry,Dark Horse,215672,False,2013,74,0.647,0.585,6,-6.123,1,0.0512,0.00314,0.0,0.165,0.353,131.934,pop +ScHoolboy Q,Collard Greens,299960,True,2014,0,0.826,0.571,11,-4.871,1,0.064,0.0239,7.38e-06,0.207,0.666,153.972,"hip hop, pop" +Iggy Azalea,Fancy,199938,True,2014,69,0.912,0.716,10,-4.141,0,0.0697,0.0904,0.0,0.0491,0.377,94.981,"hip hop, pop" +Maroon 5,Animals,231013,False,2014,79,0.279,0.742,4,-6.46,0,0.0898,0.000185,0.0,0.593,0.328,189.868,pop +Sam Smith,Stay With Me,172723,False,2014,80,0.418,0.42,0,-6.444,1,0.0414,0.588,6.39e-05,0.11,0.184,84.094,pop +MAGIC!,Rude,224840,False,2014,80,0.773,0.758,1,-4.993,1,0.0381,0.0422,0.0,0.305,0.925,144.033,set() +Sia,Chandelier,216120,False,2014,78,0.399,0.787,1,-2.88,1,0.0499,0.0197,6.07e-05,0.0685,0.572,117.089,pop +John Legend,All of Me,269560,False,2013,84,0.422,0.264,8,-7.064,1,0.0322,0.922,0.0,0.132,0.331,119.93,"pop, R&B" +ZHU,Faded,223386,False,2014,49,0.861,0.475,9,-7.195,0,0.0487,0.00865,0.119,0.122,0.599,124.96,Dance/Electronic +Meghan Trainor,All About That Bass,187920,True,2015,72,0.807,0.887,9,-3.726,1,0.0503,0.0573,2.87e-06,0.124,0.961,134.052,pop +David Guetta,Lovers on the Sun (feat. 
Sam Martin),203520,False,2014,50,0.645,0.891,6,-2.505,0,0.0387,0.0932,3.88e-06,0.379,0.568,124.915,"hip hop, pop, Dance/Electronic" +Iggy Azalea,Black Widow,209423,True,2014,67,0.743,0.72,11,-3.753,1,0.124,0.192,0.000386,0.109,0.519,163.99,"hip hop, pop" +Tove Lo,Habits (Stay High),209160,False,2014,74,0.733,0.65,5,-3.539,1,0.0315,0.0703,6.59e-05,0.0829,0.348,110.003,"pop, Dance/Electronic" +Oliver Heldens,Gecko (Overdrive) - Radio Edit,165440,False,2014,67,0.609,0.885,0,-5.469,1,0.0642,0.00521,1.15e-05,0.336,0.76,124.959,"pop, Dance/Electronic" +DJ Snake,You Know You Like It,247266,False,2014,69,0.407,0.725,5,-5.346,0,0.188,0.0141,2.46e-06,0.306,0.247,196.093,"hip hop, pop, Dance/Electronic" +Jason Derulo,Talk Dirty (feat. 2 Chainz),177685,True,2013,56,0.76,0.652,6,-7.321,1,0.232,0.0348,0.0,0.307,0.759,100.315,"hip hop, pop" +Maroon 5,Maps,189840,False,2014,0,0.737,0.723,1,-5.51,0,0.0295,0.0179,0.0,0.0675,0.893,120.003,pop +David Guetta,Bad (feat. Vassy) - Radio Edit,170625,False,2014,45,0.614,0.972,5,-3.927,0,0.088,0.00125,0.0186,0.328,0.411,127.966,"hip hop, pop, Dance/Electronic" +Tinashe,2 On (feat. ScHoolboy Q),227000,True,2014,71,0.742,0.595,9,-7.51,1,0.107,0.15,0.0,0.111,0.431,101.013,"hip hop, pop, R&B, Dance/Electronic" +Route 94,My Love (feat. Jess Glynne),259934,False,2014,72,0.814,0.622,8,-7.573,1,0.0492,0.000132,0.72,0.0658,0.726,119.976,"pop, Dance/Electronic" +Taylor Swift,Bad Blood,211933,False,2014,54,0.646,0.794,7,-6.104,1,0.19,0.0885,6.16e-06,0.201,0.287,170.216,pop +Trey Songz,Na Na,231906,False,2015,62,0.67,0.476,9,-6.253,0,0.0406,0.328,0.0,0.104,0.235,96.975,"hip hop, pop, R&B" +Calvin Harris,Under Control (feat. Hurts),184280,False,2014,72,0.544,0.915,8,-3.405,1,0.0841,0.129,0.000914,0.115,0.51,126.094,"hip hop, pop, Dance/Electronic" +O.T. Genasis,CoCo,239573,True,2014,61,0.886,0.628,8,-5.949,1,0.131,0.00215,1.15e-05,0.31,0.178,119.906,"hip hop, pop" +Jason Derulo,Wiggle (feat. 
Snoop Dogg),193295,False,2013,66,0.697,0.621,9,-6.886,0,0.25,0.0802,0.0,0.162,0.721,81.946,"hip hop, pop" +Lilly Wood and The Prick,Prayer in C - Robin Schulz Radio Edit,189399,False,2014,66,0.76,0.886,9,-5.356,0,0.0258,0.0219,7.43e-06,0.623,0.78,123.002,"pop, rock" +Jeremih,Don't Tell 'Em,266840,True,2015,74,0.856,0.527,2,-5.225,1,0.0997,0.392,0.0,0.11,0.386,98.052,"hip hop, pop, R&B" +Nicki Minaj,Anaconda,260240,True,2014,66,0.964,0.605,9,-6.223,1,0.179,0.0668,7.78e-06,0.214,0.646,129.994,"hip hop, pop" +Calvin Harris,Blame (feat. John Newman),212960,False,2014,75,0.414,0.857,0,-4.078,0,0.0808,0.0287,0.00574,0.343,0.348,128.024,"hip hop, pop, Dance/Electronic" +DJ Snake,Turn Down for What,213733,False,2013,70,0.818,0.799,1,-4.1,0,0.156,0.00107,0.128,0.057,0.0815,100.014,"hip hop, pop, Dance/Electronic" +Pitbull,Timber (feat. Ke$ha),204160,False,2012,80,0.581,0.963,11,-4.087,1,0.0981,0.0295,0.0,0.139,0.788,129.992,"hip hop, pop, latin" +OneRepublic,Counting Stars,257386,False,2014,62,0.663,0.705,1,-4.972,0,0.0385,0.0654,0.0,0.117,0.477,122.014,pop +Bobby Shmurda,Hot N*gga,194561,True,2014,73,0.794,0.51,9,-7.314,1,0.42,0.0505,1.81e-06,0.0562,0.18,167.879,"hip hop, pop" +Calvin Harris,Summer,224506,False,2014,21,0.603,0.861,4,-3.565,0,0.0325,0.0215,0.043,0.1,0.72,127.962,"hip hop, pop, Dance/Electronic" +ScHoolboy Q,Man Of The Year,216013,True,2014,0,0.743,0.861,3,-5.234,1,0.0428,0.00518,0.0,0.181,0.391,111.959,"hip hop, pop" +Nico & Vinz,Am I Wrong,245866,False,2014,76,0.729,0.675,8,-6.003,1,0.0312,0.175,1.58e-06,0.55,0.779,119.968,"hip hop, pop" +Becky G,Shower,206166,False,2014,78,0.699,0.529,2,-7.548,1,0.0487,0.0317,3.59e-05,0.285,0.121,119.987,"pop, latin" +Sigma,Nobody To Love - Radio Edit,189720,False,2014,62,0.441,0.921,8,-3.32,1,0.0436,0.000526,6.34e-06,0.177,0.182,175.001,"pop, Dance/Electronic" +The Chainsmokers,#SELFIE - Original Mix,183750,False,2014,0,0.789,0.915,0,-3.263,1,0.248,0.0135,8.77e-06,0.0818,0.66,127.955,"pop, Dance/Electronic" 
+Lorde,Team,193058,False,2013,76,0.69,0.578,6,-7.436,1,0.0929,0.167,0.0,0.305,0.416,99.961,"pop, Dance/Electronic" +Rixton,Me And My Broken Heart,193733,False,2014,77,0.545,0.783,5,-4.261,0,0.0345,0.00489,0.0,0.132,0.551,174.084,pop +Klingande,Jubel - Radio Edit,201626,False,2013,50,0.686,0.524,0,-7.251,1,0.0343,0.649,0.0569,0.14,0.0951,124.938,"pop, Dance/Electronic" +Mr. Probz,Waves - Robin Schulz Radio Edit,208133,False,2014,2,0.829,0.51,5,-9.334,0,0.0369,0.00821,0.0014,0.0829,0.45,119.993,"hip hop, Dance/Electronic" +Paloma Faith,Only Love Can Hurt Like This,232893,False,2014,80,0.566,0.885,8,-4.528,1,0.0818,0.0958,9.97e-05,0.334,0.304,90.99,"pop, R&B, Dance/Electronic" +Kiesza,Hideaway,251986,False,2014,64,0.838,0.72,7,-4.135,0,0.0483,0.0862,0.00696,0.0772,0.203,122.993,"pop, Dance/Electronic" +Ellie Goulding,Burn,231211,False,2014,70,0.559,0.777,1,-5.031,1,0.0432,0.31,0.0,0.105,0.329,87.016,"pop, rock, Dance/Electronic" +Ariana Grande,Break Free,214840,False,2014,0,0.687,0.702,7,-5.324,0,0.0455,0.0064,4.35e-05,0.204,0.284,129.956,pop +Kid Ink,Show Me (feat. Chris Brown),217800,True,2014,68,0.711,0.508,7,-7.299,1,0.0502,0.0355,0.0,0.141,0.663,98.098,"hip hop, pop" +Tove Lo,Talking Body,238426,True,2014,66,0.736,0.761,4,-5.449,0,0.0338,0.0966,0.0,0.0823,0.114,119.999,"pop, Dance/Electronic" +Chris Brown,Loyal (feat. Lil Wayne & Tyga),264946,True,2014,72,0.841,0.522,10,-5.963,0,0.049,0.0168,1.37e-06,0.188,0.616,99.059,"hip hop, pop, R&B" +OneRepublic,Love Runs Out,224226,False,2014,59,0.719,0.935,7,-3.752,1,0.0589,0.167,0.0,0.0973,0.738,120.022,pop +Olly Murs,Up (feat. Demi Lovato),224293,False,2014,0,0.69,0.845,9,-4.676,1,0.0339,0.0193,0.0,0.101,0.638,114.948,"pop, Dance/Electronic" +Peking Duk,High (feat. 
Nicole Millar),228000,False,2014,48,0.525,0.803,0,-4.052,0,0.0492,0.00129,0.000868,0.29,0.383,100.094,Dance/Electronic +Milky Chance,Stolen Dance,313684,False,2014,73,0.885,0.581,11,-8.813,1,0.0378,0.427,0.000204,0.0759,0.728,114.016,set() +Ed Sheeran,Don't,219840,False,2014,72,0.806,0.608,1,-7.008,1,0.0659,0.0113,0.0,0.635,0.849,95.049,pop +Ariana Grande,Love Me Harder,236133,False,2014,74,0.472,0.714,1,-4.389,0,0.0334,0.00937,0.0,0.0764,0.24,98.992,pop +Enrique Iglesias,Bailando - Spanish Version,243386,False,2014,66,0.718,0.792,7,-3.519,1,0.105,0.0467,3.65e-06,0.0399,0.96,90.949,"pop, latin" +Avicii,Hey Brother,255093,False,2013,2,0.545,0.78,7,-4.867,0,0.0436,0.0309,4.64e-05,0.0828,0.458,125.014,"pop, Dance/Electronic" +Jessie J,Bang Bang,199386,False,2014,77,0.706,0.786,0,-3.417,0,0.091,0.26,0.0,0.38,0.751,150.028,"hip hop, pop" +Duke Dumont,I Got U,285596,False,2014,67,0.636,0.761,9,-7.752,0,0.035,0.00377,0.00784,0.0851,0.463,120.837,"pop, Dance/Electronic" +Gorgon City,Ready For Your Love,198880,False,2014,61,0.677,0.753,5,-7.038,1,0.154,0.0531,0.0128,0.333,0.471,121.939,"pop, Dance/Electronic" +Taylor Swift,Style,231000,False,2014,61,0.588,0.791,7,-5.595,1,0.0402,0.00245,0.00258,0.118,0.487,94.933,pop +MKTO,Classic,175426,False,2012,82,0.72,0.791,1,-4.689,1,0.124,0.0384,0.0,0.157,0.756,102.071,"hip hop, pop" +Disclosure,Latch,255631,False,2013,74,0.503,0.727,1,-5.456,1,0.167,0.0159,9.45e-05,0.0895,0.521,121.985,"pop, Dance/Electronic" +Ariana Grande,Problem,193920,False,2014,73,0.66,0.805,1,-5.352,0,0.153,0.0192,8.83e-06,0.159,0.625,103.008,pop +Charli XCX,Boom Clap,169866,False,2014,49,0.659,0.911,4,-2.28,1,0.0786,0.154,0.000304,0.191,0.576,91.999,"pop, Dance/Electronic" +One Direction,Steal My Girl,228133,False,2014,79,0.536,0.768,10,-5.948,0,0.0347,0.00433,0.0,0.114,0.545,77.217,pop +Tiësto,Red Lights,263890,False,2013,0,0.653,0.829,10,-4.783,1,0.0377,0.000739,1.5e-06,0.225,0.545,124.989,"pop, Dance/Electronic" +Selena Gomez,The Heart Wants What It 
Wants,227360,False,2014,58,0.616,0.789,7,-4.874,0,0.0377,0.053,0.0,0.142,0.621,83.066,pop +Rita Ora,I Will Never Let You Down,203466,False,2014,65,0.753,0.801,4,-3.215,1,0.0296,0.403,0.0,0.128,0.794,128.011,"hip hop, pop, Dance/Electronic" +5 Seconds of Summer,She Looks So Perfect,202496,False,2014,74,0.494,0.951,9,-4.237,1,0.132,0.000569,0.0,0.327,0.441,160.025,pop +The Script,Superheroes,245466,False,2014,74,0.49,0.885,3,-4.121,1,0.0396,0.00218,0.0,0.0741,0.64,166.996,"pop, rock" +Taylor Swift,Shake It Off,219200,False,2014,79,0.647,0.8,7,-5.384,1,0.165,0.0647,0.0,0.334,0.942,160.078,pop +One Direction,Night Changes,226600,False,2014,83,0.672,0.52,8,-7.747,1,0.0353,0.859,0.0,0.115,0.37,120.001,pop +Ed Sheeran,Sing,235382,False,2014,69,0.818,0.67,8,-4.451,0,0.0472,0.304,1.22e-06,0.0601,0.939,119.988,pop +Alesso,Heroes (we could be),209866,False,2014,52,0.521,0.754,5,-4.144,1,0.0634,0.0373,0.0,0.239,0.352,125.876,"pop, Dance/Electronic" +Tiësto,Wasted,188371,False,2014,0,0.645,0.832,2,-5.595,1,0.0294,0.00106,0.00264,0.199,0.375,112.028,"pop, Dance/Electronic" +G.R.L.,Ugly Heart,198306,False,2014,64,0.65,0.786,9,-5.488,1,0.0463,0.019,0.0,0.323,0.446,124.96,pop +Coldplay,A Sky Full of Stars,267866,False,2014,80,0.545,0.675,6,-6.474,1,0.0279,0.00617,0.00197,0.209,0.162,124.97,"rock, pop" +Clean Bandit,Rather Be (feat. 
Jess Glynne),227833,False,2014,72,0.799,0.586,11,-6.735,1,0.0377,0.162,2.03e-06,0.193,0.549,120.97,"pop, Dance/Electronic" +Zedd,Stay The Night - Featuring Hayley Williams Of Paramore,217346,False,2014,63,0.596,0.738,8,-3.109,1,0.0411,0.109,0.0,0.0947,0.461,127.961,"hip hop, pop, Dance/Electronic" +Sia,Elastic Heart,257200,False,2014,70,0.421,0.791,9,-4.998,1,0.0496,0.0117,1.48e-05,0.146,0.499,130.075,pop +One Direction,Story of My Life,245493,False,2013,81,0.6,0.663,3,-5.802,1,0.0477,0.225,0.0,0.119,0.286,121.07,pop +Alex & Sierra,Little Do You Know,185200,False,2014,75,0.5,0.292,0,-8.554,1,0.0323,0.746,0.0,0.188,0.37,145.879,pop +Omarion,Post to Be (feat. Chris Brown & Jhene Aiko),226580,True,2014,55,0.733,0.676,10,-5.655,0,0.0432,0.0697,0.0,0.208,0.701,97.448,"hip hop, pop, R&B" +Echosmith,Cool Kids,237626,False,2013,71,0.719,0.671,8,-6.279,1,0.0336,0.0372,8.21e-06,0.12,0.786,130.027,pop +Natalie La Rose,Somebody,189906,False,2014,66,0.83,0.52,0,-8.714,1,0.0376,0.000792,1.25e-05,0.0656,0.735,104.99,set() +Vance Joy,Riptide,204280,False,2014,80,0.484,0.731,1,-6.694,1,0.0379,0.431,0.0,0.151,0.51,101.654,"rock, pop" +Beyoncé,Drunk in Love (feat. Jay-Z),323480,True,2014,70,0.589,0.621,5,-6.902,0,0.0468,0.00969,0.00104,0.181,0.401,140.03,"pop, R&B" +Maroon 5,Sugar,235493,True,2014,0,0.744,0.783,1,-7.077,1,0.0337,0.0553,0.0,0.086,0.885,120.042,pop +Ella Henderson,Ghost,213213,False,2014,63,0.68,0.84,9,-3.823,1,0.0414,0.0457,8.66e-06,0.143,0.468,104.975,"pop, Dance/Electronic" +Ed Sheeran,"I See Fire - From ""The Hobbit - The Desolation Of Smaug""",300840,False,2013,71,0.581,0.0549,10,-20.514,0,0.0397,0.559,0.0,0.0718,0.234,152.037,pop +Chris Brown,New Flame (feat. Usher & Rick Ross),244226,True,2014,67,0.702,0.629,1,-4.292,1,0.041,0.0506,0.0,0.0963,0.446,141.967,"hip hop, pop, R&B" +Sheppard,Geronimo,218227,False,2014,0,0.707,0.771,7,-6.275,1,0.0783,0.434,0.0013,0.115,0.437,142.016,"pop, rock, Dance/Electronic" +Olly Murs,Wrapped Up (feat. 
Travie McCoy),185640,False,2014,0,0.787,0.848,1,-4.696,1,0.0549,0.0915,0.0,0.15,0.947,121.989,"pop, Dance/Electronic" +Sam Smith,Money On My Mind,192670,False,2014,62,0.688,0.841,2,-5.217,1,0.179,0.204,0.000295,0.229,0.743,133.912,pop +Jason Derulo,Trumpets,217306,False,2013,53,0.627,0.703,0,-4.884,1,0.236,0.563,0.0,0.0962,0.64,81.897,"hip hop, pop" +Nicki Minaj,Bed Of Lies,269946,True,2014,53,0.716,0.67,11,-5.779,1,0.194,0.251,0.0,0.109,0.124,86.01,"hip hop, pop" +Nick Jonas,Jealous - Remix,222213,True,2014,0,0.685,0.709,11,-3.578,0,0.0748,0.0132,0.0,0.452,0.504,93.047,pop +Mark Ronson,Uptown Funk (feat. Bruno Mars),269666,True,2015,82,0.856,0.609,0,-7.223,1,0.0824,0.00801,8.15e-05,0.0344,0.928,114.988,pop +Ed Sheeran,Thinking out Loud,281560,False,2014,81,0.781,0.445,2,-6.061,1,0.0295,0.474,0.0,0.184,0.591,78.998,pop +Wiz Khalifa,See You Again (feat. Charlie Puth),229525,False,2015,81,0.689,0.481,10,-7.503,1,0.0815,0.369,1.03e-06,0.0649,0.283,80.025,"hip hop, pop" +Justin Bieber,What Do You Mean?,205680,False,2015,77,0.845,0.567,5,-8.118,0,0.0956,0.59,0.00142,0.0811,0.793,125.02,pop +Major Lazer,Lean On (feat. MØ & DJ Snake),176561,False,2015,73,0.723,0.809,7,-3.081,0,0.0625,0.00346,0.00123,0.565,0.274,98.007,"hip hop, pop, Dance/Electronic" +The Weeknd,"Earned It (Fifty Shades Of Grey) - From The ""Fifty Shades Of Grey"" Soundtrack",252226,False,2014,76,0.659,0.381,2,-5.922,0,0.0304,0.385,0.0,0.0972,0.426,119.844,"pop, R&B" +Ellie Goulding,"Love Me Like You Do - From ""Fifty Shades Of Grey""",252534,False,2015,78,0.262,0.606,8,-6.646,1,0.0484,0.247,0.0,0.125,0.275,189.857,"pop, rock, Dance/Electronic" +Calvin Harris,Outside (feat. Ellie Goulding),227266,False,2014,78,0.646,0.823,2,-4.123,0,0.0394,0.213,0.0,0.322,0.418,128.035,"hip hop, pop, Dance/Electronic" +David Guetta,"Hey Mama (feat. 
Nicki Minaj, Bebe Rexha & Afrojack)",192560,False,2014,75,0.596,0.73,9,-4.091,1,0.151,0.24,0.0,0.325,0.525,85.979,"hip hop, pop, Dance/Electronic" +Demi Lovato,Cool for the Summer,214746,True,2015,0,0.586,0.613,5,-5.593,0,0.0386,0.00441,0.000183,0.0853,0.319,114.101,pop +Taylor Swift,Blank Space,231826,False,2014,82,0.76,0.703,5,-5.412,1,0.054,0.103,0.0,0.0913,0.57,95.997,pop +The Weeknd,The Hills,242253,True,2015,84,0.585,0.564,0,-7.063,0,0.0515,0.0671,0.0,0.135,0.137,113.003,"pop, R&B" +The Neighbourhood,Daddy Issues,260173,False,2015,85,0.588,0.521,10,-9.461,1,0.0329,0.0678,0.149,0.123,0.337,85.012,"rock, pop" +Deorro,Five More Hours,211975,False,2015,66,0.699,0.883,5,-3.226,0,0.219,0.0288,0.0,0.817,0.499,127.961,"pop, Dance/Electronic" +Avicii,The Nights,176658,False,2014,85,0.527,0.835,6,-5.298,1,0.0433,0.0166,0.0,0.249,0.654,125.983,"pop, Dance/Electronic" +DJ Snake,You Know You Like It,247266,False,2014,69,0.407,0.725,5,-5.346,0,0.188,0.0141,2.46e-06,0.306,0.247,196.093,"hip hop, pop, Dance/Electronic" +Adam Lambert,Ghost Town,208330,True,2015,46,0.703,0.736,4,-5.709,0,0.046,0.186,1.92e-05,0.174,0.274,119.955,pop +Rae Sremmurd,No Type,200080,True,2015,71,0.891,0.486,6,-7.803,1,0.159,0.0158,3.78e-06,0.0925,0.239,125.01,"hip hop, pop" +Taylor Swift,Bad Blood,200106,False,2015,70,0.654,0.655,11,-7.388,0,0.106,0.0294,0.0,0.139,0.221,170.16,pop +Flo Rida,GDFR (feat. Sage the Gemini & Lookas),190185,False,2015,49,0.657,0.827,5,-4.036,1,0.0734,0.000704,0.00534,0.065,0.69,145.889,"hip hop, pop" +Robin Schulz,Sugar (feat. 
Francesco Yates),219043,False,2015,76,0.636,0.815,5,-5.098,0,0.0581,0.0185,0.0,0.163,0.636,123.063,"pop, Dance/Electronic" +Calvin Harris,How Deep Is Your Love,212640,False,2015,76,0.738,0.868,11,-4.373,0,0.0731,0.0392,0.00169,0.388,0.336,122.003,"hip hop, pop, Dance/Electronic" +Philip George,Wish You Were Mine - Radio Edit,177560,False,2014,65,0.654,0.832,7,-4.164,0,0.0293,0.0126,0.0901,0.102,0.407,123.002,"pop, Dance/Electronic" +Trey Songz,Na Na,231906,False,2015,62,0.67,0.476,9,-6.253,0,0.0406,0.328,0.0,0.104,0.235,96.975,"hip hop, pop, R&B" +Fall Out Boy,Centuries,228360,False,2015,80,0.393,0.858,4,-2.868,0,0.0729,0.00359,0.0,0.102,0.56,176.042,rock +Rob $tone,Chill Bill (feat. J. Davi$ & Spooks),177184,True,2016,76,0.886,0.427,6,-10.028,1,0.145,0.0312,0.00099,0.0906,0.23,108.034,hip hop +Lost Frequencies,Are You With Me - Radio Edit,138842,False,2014,33,0.776,0.574,5,-9.882,0,0.0317,0.466,7.83e-05,0.131,0.412,121.03,"pop, Dance/Electronic" +Twenty One Pilots,Stressed Out,202333,False,2015,83,0.734,0.637,4,-5.677,0,0.141,0.0462,2.29e-05,0.0602,0.648,169.977,rock +Fifth Harmony,Worth It (feat. Kid Ink),224573,False,2015,73,0.884,0.765,8,-3.865,1,0.0882,0.063,7.04e-06,0.118,0.594,99.987,pop +KALEO,Way down We Go,219560,False,2015,63,0.59,0.578,10,-5.798,0,0.0528,0.612,0.000162,0.0837,0.264,81.663,"rock, classical" +Sam Smith,Like I Can,167065,False,2014,79,0.656,0.627,7,-6.627,0,0.0379,0.343,2.17e-05,0.124,0.481,99.933,pop +Major Lazer,Light It Up (feat. Nyla & Fuse ODG) - Remix,166138,False,2015,49,0.746,0.877,9,-3.782,0,0.0666,0.0375,0.000833,0.233,0.751,107.985,"hip hop, pop, Dance/Electronic" +Martin Solveig,Intoxicated - New Radio Mix,159693,False,2015,48,0.8,0.677,0,-4.023,1,0.0393,0.00563,0.00409,0.0838,0.547,125.004,"pop, Dance/Electronic" +Avicii,Waiting For Love,230613,False,2015,79,0.579,0.736,6,-3.863,0,0.0527,0.31,0.0,0.198,0.613,127.999,"pop, Dance/Electronic" +Felix Jaehn,Ain't Nobody (Loves Me Better) (feat. 
Jasmine Thompson),186146,False,2018,72,0.778,0.566,2,-6.959,0,0.0311,0.672,0.000197,0.0698,0.479,117.971,"hip hop, pop, Dance/Electronic" +Taylor Swift,Wildest Dreams,220440,False,2014,78,0.553,0.664,8,-7.417,1,0.0741,0.0709,0.0056,0.106,0.467,140.06,pop +The Weeknd,In The Night,235653,True,2015,0,0.48,0.682,7,-4.94,1,0.13,0.0696,0.0,0.0463,0.506,167.939,"pop, R&B" +Pitbull,Time of Our Lives,229360,True,2014,80,0.721,0.802,1,-5.797,1,0.0583,0.0921,0.0,0.694,0.724,124.022,"hip hop, pop, latin" +Galantis,Runaway (U & I),227073,False,2015,74,0.506,0.805,1,-4.119,1,0.0469,0.00711,0.00193,0.0856,0.383,126.008,"pop, Dance/Electronic" +Rihanna,Bitch Better Have My Money,219305,True,2015,73,0.781,0.728,1,-4.981,1,0.0621,0.0509,1.94e-06,0.257,0.395,102.99,"hip hop, pop, R&B" +Zara Larsson,Lush Life,202213,False,2015,0,0.658,0.741,7,-2.86,0,0.0536,0.141,0.0,0.189,0.789,98.024,"pop, Dance/Electronic" +One Direction,Drag Me Down,192120,False,2015,79,0.73,0.703,0,-5.672,0,0.0369,0.109,0.0,0.0657,0.595,138.113,pop +Travis Scott,Antidote,262693,True,2015,73,0.713,0.526,1,-5.046,1,0.032,0.00767,0.000148,0.124,0.131,131.05,"hip hop, Dance/Electronic" +Years & Years,King,215360,False,2015,0,0.559,0.848,4,-4.125,0,0.0388,0.0665,0.0,0.382,0.466,119.977,"pop, Dance/Electronic" +Demi Lovato,Confident,205745,True,2015,64,0.594,0.749,3,-6.251,1,0.0677,0.00188,0.000139,0.0869,0.344,130.064,pop +Lemaitre,Closer,270589,False,2017,59,0.583,0.785,2,-3.991,0,0.0337,0.0012,0.0127,0.124,0.355,91.977,pop +Selena Gomez,Same Old Love,229080,False,2015,69,0.672,0.593,11,-4.01,0,0.0304,0.0223,0.0,0.214,0.438,98.02,pop +Pia Mia,Do It Again,207746,True,2015,72,0.712,0.564,8,-6.527,1,0.047,0.0266,0.0,0.0874,0.411,95.95,"pop, R&B" +Flo Rida,My House,192190,False,2015,76,0.688,0.702,7,-4.792,0,0.0499,0.0215,0.0,0.128,0.74,94.006,"hip hop, pop" +Silentó,Watch Me (Whip / Nae Nae),185131,False,2015,0,0.819,0.768,8,-8.522,1,0.134,0.234,0.0,0.334,0.964,139.982,hip hop +Chris 
Brown,Ayo,225226,True,2015,71,0.823,0.563,6,-5.095,1,0.0443,0.0399,0.0,0.0851,0.682,98.031,"hip hop, pop, R&B" +The Strumbellas,Spirits,203653,False,2016,71,0.556,0.658,0,-6.075,1,0.0265,0.164,0.0,0.113,0.787,80.529,"rock, Folk/Acoustic, pop" +Fetty Wap,Trap Queen,222093,True,2015,68,0.746,0.873,7,-3.803,1,0.128,0.0244,0.0,0.354,0.817,148.075,"hip hop, pop" +Tove Lo,Talking Body,238426,True,2014,66,0.736,0.761,4,-5.449,0,0.0338,0.0966,0.0,0.0823,0.114,119.999,"pop, Dance/Electronic" +Beyoncé,7/11,213506,False,2014,69,0.747,0.705,9,-5.137,0,0.126,0.0128,0.0,0.126,0.56,136.024,"pop, R&B" +Lana Del Rey,High By The Beach,257573,True,2015,67,0.536,0.486,11,-11.067,0,0.0346,0.244,0.00788,0.12,0.0968,131.988,pop +Zara Larsson,Never Forget You,213427,False,2017,73,0.583,0.732,11,-5.728,0,0.0457,0.00312,9.86e-06,0.269,0.276,145.992,"pop, Dance/Electronic" +Jeremih,oui,238320,False,2015,77,0.418,0.724,5,-3.724,1,0.0964,0.213,0.0,0.112,0.604,78.521,"hip hop, pop, R&B" +Drake,Hotline Bling,267066,False,2016,77,0.891,0.628,2,-7.863,1,0.0551,0.00258,0.00019,0.0504,0.552,134.966,"hip hop, pop, R&B" +Selena Gomez,Kill Em With Kindness,217906,False,2015,64,0.757,0.884,10,-5.488,0,0.0404,0.00795,5.87e-05,0.0973,0.398,120.012,pop +Birdy,Let It All Go,280757,False,2015,68,0.383,0.43,10,-8.644,1,0.0302,0.817,1.04e-06,0.0869,0.174,107.005,pop +Fetty Wap,679 (feat. Remy Boyz),196693,True,2015,74,0.618,0.717,7,-5.738,1,0.318,0.00256,0.0,0.625,0.603,190.05,"hip hop, pop" +Ariana Grande,Love Me Harder,236133,False,2014,74,0.472,0.714,1,-4.389,0,0.0334,0.00937,0.0,0.0764,0.24,98.992,pop +The Weeknd,Can't Feel My Face,213520,False,2015,79,0.705,0.769,9,-5.526,0,0.0425,0.113,0.0,0.105,0.583,107.949,"pop, R&B" +SAYGRACE,You Don't Own Me (feat. G-Eazy),199314,False,2015,55,0.336,0.664,7,-5.68,1,0.102,0.166,1.31e-06,0.0575,0.294,186.394,"pop, R&B" +X Ambassadors,Renegades,195200,False,2015,75,0.526,0.862,2,-6.003,1,0.0905,0.0144,0.0597,0.229,0.528,90.052,"hip hop, rock, pop" +R. 
City,Locked Away (feat. Adam Levine),227480,False,2015,77,0.509,0.671,1,-5.709,1,0.0678,0.304,0.0,0.0452,0.55,118.413,pop +Kygo,Stole the Show,223186,False,2016,74,0.64,0.635,8,-7.565,0,0.226,0.271,0.0,0.319,0.475,100.034,"pop, Dance/Electronic" +Tame Impala,The Less I Know The Better,216320,True,2015,83,0.64,0.74,4,-4.083,1,0.0284,0.0115,0.00678,0.167,0.785,116.879,metal +Selena Gomez,Good For You,221560,True,2015,0,0.6,0.676,5,-6.447,0,0.0652,0.154,0.0,0.0741,0.217,88.928,pop +Fetty Wap,My Way (feat. Monty),213053,True,2015,55,0.748,0.741,6,-3.103,1,0.0531,0.00419,0.0,0.147,0.537,128.077,"hip hop, pop" +Hozier,Take Me To Church,241688,False,2014,81,0.566,0.664,4,-5.303,0,0.0464,0.634,0.0,0.116,0.437,128.945,"Folk/Acoustic, pop" +Taylor Swift,Style,231000,False,2014,77,0.588,0.791,7,-5.595,1,0.0402,0.00245,0.00258,0.118,0.487,94.933,pop +Alessia Cara,Here,199453,False,2015,0,0.379,0.799,0,-4.031,1,0.163,0.0804,0.0,0.0793,0.359,120.892,"pop, R&B" +Sam Smith,"Writing's On The Wall - From ""Spectre"" Soundtrack",278987,False,2015,66,0.261,0.302,5,-8.19,0,0.0339,0.769,2.57e-06,0.0863,0.0883,81.418,pop +David Guetta,Bang My Head (feat. Sia & Fetty Wap),193333,False,2015,47,0.599,0.869,0,-3.697,1,0.0789,0.0525,0.00719,0.103,0.593,108.061,"hip hop, pop, Dance/Electronic" +Meek Mill,All Eyes on You (feat. 
Chris Brown & Nicki Minaj),223973,True,2015,68,0.589,0.658,11,-5.288,0,0.203,0.0242,0.0,0.118,0.251,77.521,"hip hop, pop" +Big Sean,I Don't Fuck With You,284386,True,2015,75,0.824,0.733,1,-5.474,1,0.0613,0.0362,0.0,0.325,0.395,97.972,"hip hop, pop" +Kygo,Firestone,271640,False,2016,76,0.704,0.634,11,-7.374,0,0.0428,0.393,3.17e-05,0.0952,0.411,113.927,"pop, Dance/Electronic" +Drake,Jumpman,205879,True,2015,0,0.853,0.558,1,-7.448,1,0.202,0.0462,0.0,0.341,0.654,142.079,"hip hop, pop, R&B" +Petit Biscuit,Sunset Lover,237792,False,2015,71,0.783,0.467,6,-9.474,1,0.0503,0.729,0.809,0.109,0.236,90.838,Dance/Electronic +Shawn Mendes,Stitches,206880,False,2015,78,0.746,0.754,1,-6.684,1,0.0676,0.0152,0.0,0.0486,0.746,149.882,pop +Flo Rida,"I Don't Like It, I Love It (feat. Robin Thicke & Verdine White)",224258,False,2015,67,0.854,0.766,9,-4.697,0,0.141,0.0242,0.0,0.0793,0.784,118.004,"hip hop, pop" +Selena Gomez,The Heart Wants What It Wants,227373,False,2014,64,0.616,0.789,7,-4.874,0,0.0377,0.053,0.0,0.142,0.621,83.066,pop +Jack Ü,Where Are Ü Now (with Justin Bieber),250285,False,2015,74,0.432,0.781,4,-4.038,0,0.0567,0.041,4.21e-06,0.0789,0.197,139.432,"pop, Dance/Electronic" +Ellie Goulding,On My Mind,213445,False,2015,0,0.699,0.688,2,-6.607,0,0.0522,0.264,4.11e-06,0.0863,0.742,154.943,"pop, rock, Dance/Electronic" +Bryson Tiller,Don't,198293,True,2015,78,0.765,0.356,11,-5.556,0,0.195,0.223,0.0,0.0963,0.189,96.991,"hip hop, pop, R&B" +Rich Homie Quan,"Flex (Ooh, Ooh, Ooh)",176674,True,2015,1,0.676,0.615,6,-7.534,0,0.0496,0.0384,0.0,0.341,0.883,163.993,"hip hop, pop" +Joel Adams,Please Don't Go,210580,False,2015,71,0.513,0.768,4,-4.868,0,0.0587,0.0118,1.94e-05,0.294,0.235,84.264,R&B +Zedd,I Want You To Know,238800,False,2015,0,0.561,0.877,9,-2.215,0,0.0659,0.00621,8.11e-06,0.28,0.375,130.003,"hip hop, pop, Dance/Electronic" +Jonas Blue,Fast Car,212424,False,2015,67,0.644,0.57,9,-6.994,1,0.052,0.484,0.0,0.299,0.527,113.945,"pop, Dance/Electronic" +BØRNS,Electric 
Love,218173,False,2015,0,0.373,0.858,6,-6.536,0,0.0889,0.00407,0.0016,0.256,0.605,120.063,"rock, pop" +Sigala,Easy Love,229813,False,2018,66,0.68,0.942,9,-4.208,1,0.0631,0.175,0.0013,0.117,0.647,123.976,"pop, Dance/Electronic" +Justin Bieber,Sorry,200786,False,2015,80,0.654,0.76,0,-3.669,0,0.045,0.0797,0.0,0.299,0.41,99.945,pop +Ariana Grande,One Last Time,197266,False,2014,79,0.628,0.593,8,-5.036,1,0.0323,0.093,1.65e-06,0.096,0.104,125.026,pop +Little Mix,Black Magic,211773,False,2015,1,0.777,0.896,4,-4.467,1,0.0619,0.0352,0.0,0.317,0.843,111.987,pop +One Direction,Perfect,230333,False,2015,77,0.647,0.823,2,-5.231,1,0.0762,0.0598,0.0,0.119,0.396,99.933,pop +Years & Years,Shine,255506,False,2015,55,0.667,0.716,1,-5.21,0,0.0349,0.189,0.0,0.104,0.493,108.002,"pop, Dance/Electronic" +Meghan Trainor,Lips Are Movin,182666,False,2015,70,0.775,0.825,7,-5.402,1,0.0464,0.0506,1.03e-06,0.111,0.95,139.091,pop +Nicky Jam,El Perdón - Forgiveness,206666,False,2017,51,0.736,0.719,8,-3.839,1,0.0357,0.256,0.0,0.0909,0.656,90.013,latin +The Weeknd,Starboy,230453,True,2016,0,0.681,0.594,7,-7.028,1,0.282,0.165,3.49e-06,0.134,0.535,186.054,"pop, R&B" +Drake,One Dance,173986,False,2016,84,0.792,0.625,1,-5.609,1,0.0536,0.00776,0.0018,0.329,0.37,103.967,"hip hop, pop, R&B" +Justin Bieber,Love Yourself,233720,False,2015,0,0.609,0.378,4,-9.828,1,0.438,0.835,0.0,0.28,0.515,100.418,pop +The Chainsmokers,Closer,244960,False,2016,83,0.748,0.524,8,-5.599,1,0.0338,0.414,0.0,0.111,0.661,95.01,"pop, Dance/Electronic" +Adele,Hello,295502,False,2015,1,0.471,0.431,5,-6.129,0,0.0342,0.329,0.0,0.0854,0.289,157.98,"pop, R&B" +Desiigner,Panda,246761,True,2016,70,0.576,0.766,10,-4.943,0,0.449,0.028,1.68e-06,0.366,0.236,144.833,"hip hop, pop" +Burak Yeter,Tuesday (feat. Danelle Sandoval),241874,False,2016,72,0.841,0.639,9,-6.052,0,0.0688,0.0156,0.0654,0.0545,0.675,99.002,Dance/Electronic +Sia,Cheap Thrills (feat. 
Sean Paul),224813,False,2016,67,0.592,0.8,6,-4.931,0,0.215,0.0561,2.01e-06,0.0775,0.728,89.972,pop +OneRepublic,Wherever I Go,169773,False,2016,70,0.552,0.689,10,-6.444,0,0.0425,0.0915,0.0,0.27,0.349,99.961,pop +Rihanna,Work,219320,True,2016,75,0.725,0.534,11,-6.238,1,0.0946,0.0752,0.0,0.0919,0.558,91.974,"hip hop, pop, R&B" +Coldplay,Hymn for the Weekend,258266,False,2015,82,0.491,0.693,0,-6.487,0,0.0377,0.211,6.92e-06,0.325,0.412,90.027,"rock, pop" +Justin Bieber,Sorry,200786,False,2015,80,0.654,0.76,0,-3.669,0,0.045,0.0797,0.0,0.299,0.41,99.945,pop +Justin Timberlake,"CAN'T STOP THE FEELING! (from DreamWorks Animation's ""TROLLS"")",236001,False,2016,76,0.666,0.83,0,-5.715,1,0.0751,0.0123,0.0,0.191,0.702,113.03,pop +WILLOW,Wait a Minute!,196520,False,2015,86,0.764,0.705,3,-5.279,0,0.0278,0.0371,1.94e-05,0.0943,0.672,101.003,"pop, R&B, Dance/Electronic" +Desiigner,Tiimmy Turner,239853,True,2016,67,0.603,0.725,6,-3.054,1,0.0393,0.174,0.0,0.0786,0.304,122.803,"hip hop, pop" +G-Eazy,"Me, Myself & I",251466,True,2015,77,0.756,0.674,0,-6.518,0,0.0959,0.0184,0.0,0.158,0.389,111.995,"hip hop, pop" +David Guetta,This One's for You (feat. Zara Larsson) (Official Song UEFA EURO 2016),207272,False,2016,68,0.367,0.915,9,-3.456,0,0.0488,0.0022,5.81e-05,0.0905,0.365,110.169,"hip hop, pop, Dance/Electronic" +Astrid S,Hurts So Good,208728,False,2016,0,0.672,0.589,7,-5.008,0,0.049,0.082,0.0,0.0962,0.379,120.036,"rock, pop, Dance/Electronic" +Lil Wayne,"Sucker for Pain (with Wiz Khalifa, Imagine Dragons, Logic & Ty Dolla $ign feat. X Ambassadors)",243490,True,2016,75,0.502,0.786,9,-4.378,0,0.317,0.255,0.0,0.65,0.739,169.021,"hip hop, pop" +Twenty One Pilots,Heathens,195920,False,2016,80,0.732,0.396,4,-9.348,0,0.0286,0.0841,3.58e-05,0.105,0.548,90.024,rock +Future,Low Life (feat. 
The Weeknd),313546,True,2016,74,0.722,0.331,8,-7.789,1,0.0726,0.337,0.282,0.146,0.102,143.961,"hip hop, pop" +Galantis,No Money,189126,False,2016,67,0.671,0.916,6,-4.014,0,0.0397,0.0282,0.00762,0.24,0.803,126.01,"pop, Dance/Electronic" +The Chainsmokers,Don't Let Me Down,208373,False,2016,79,0.532,0.869,11,-5.094,1,0.172,0.157,0.00508,0.136,0.422,159.803,"pop, Dance/Electronic" +Tinie Tempah,Girls Like (feat. Zara Larsson),196000,True,2016,62,0.916,0.804,0,-3.406,1,0.049,0.37,1.18e-05,0.0812,0.538,120.028,"hip hop, pop, Dance/Electronic" +Alok,Hear Me Now,192846,False,2016,51,0.788,0.43,11,-7.757,1,0.0419,0.537,0.00261,0.0936,0.461,121.996,"pop, Dance/Electronic" +Jon Bellion,All Time Low,217603,True,2016,70,0.617,0.567,0,-4.188,1,0.0828,0.0584,0.0,0.0933,0.505,90.246,"hip hop, pop" +Cheat Codes,Sex,228361,False,2016,0,0.51,0.692,0,-5.825,1,0.171,0.00451,0.0,0.138,0.209,102.42,"pop, Dance/Electronic" +Meghan Trainor,NO,213506,False,2017,69,0.557,0.803,1,-3.599,0,0.213,0.0124,1.61e-06,0.737,0.657,92.134,pop +Dua Lipa,Hotter Than Hell,187957,False,2017,65,0.532,0.868,3,-4.23,0,0.0908,0.011,0.0,0.0584,0.529,110.127,pop +Martin Garrix,In the Name of Love,195706,False,2016,76,0.501,0.519,4,-5.88,0,0.0409,0.109,0.0,0.454,0.168,133.99,"pop, Dance/Electronic" +Alan Walker,Faded,212106,False,2018,78,0.468,0.627,6,-5.085,1,0.0476,0.0281,7.97e-06,0.11,0.159,179.642,Dance/Electronic +Tove Lo,Cool Girl,197815,True,2016,0,0.674,0.71,9,-7.042,0,0.111,0.343,6.71e-06,0.129,0.234,101.983,"pop, Dance/Electronic" +DJ Snake,Let Me Love You,205946,False,2016,81,0.649,0.716,8,-5.371,1,0.0349,0.0863,2.63e-05,0.135,0.163,99.988,"hip hop, pop, Dance/Electronic" +Calvin Harris,This Is What You Came For (feat. 
Rihanna),222160,False,2016,79,0.631,0.927,9,-2.787,0,0.0332,0.199,0.119,0.148,0.465,123.962,"hip hop, pop, Dance/Electronic" +Mike Posner,I Took A Pill In Ibiza - Seeb Remix,197933,True,2016,77,0.664,0.714,7,-6.645,0,0.111,0.0353,8.42e-06,0.0843,0.71,101.969,"hip hop, pop, Dance/Electronic" +Kevin Gates,2 Phones,240000,True,2016,69,0.895,0.681,7,-5.267,0,0.358,0.157,0.0,0.189,0.554,121.918,"hip hop, pop" +Rihanna,Needed Me,191600,True,2016,80,0.671,0.314,5,-8.091,0,0.244,0.11,0.0,0.0825,0.296,110.898,"hip hop, pop, R&B" +Ariana Grande,Side To Side,226160,True,2016,0,0.648,0.738,6,-5.883,0,0.247,0.0408,0.0,0.292,0.603,159.145,pop +Marshmello,Alone,273802,False,2016,71,0.631,0.953,2,-3.739,1,0.0343,0.0241,0.0155,0.108,0.422,141.99,"pop, Dance/Electronic" +Shawn Mendes,I Know What You Did Last Summer,223853,False,2015,73,0.687,0.761,9,-4.582,0,0.0876,0.102,0.0,0.147,0.743,113.939,pop +Selena Gomez,Same Old Love,229080,False,2015,69,0.672,0.593,11,-4.01,0,0.0304,0.0223,0.0,0.214,0.438,98.02,pop +Calvin Harris,My Way,219159,False,2016,74,0.818,0.913,4,-3.06,0,0.0426,0.093,3.69e-05,0.161,0.536,119.986,"hip hop, pop, Dance/Electronic" +Fitz and The Tantrums,HandClap,193253,False,2016,64,0.636,0.836,8,-3.004,1,0.0427,0.00609,0.000157,0.0828,0.715,139.956,"pop, rock" +Twenty One Pilots,Ride,214506,False,2015,80,0.645,0.713,6,-5.355,1,0.0393,0.00835,0.0,0.113,0.566,74.989,rock +Flo Rida,My House,192190,False,2015,76,0.688,0.702,7,-4.792,0,0.0499,0.0215,0.0,0.128,0.74,94.006,"hip hop, pop" +Ariana Grande,Dangerous Woman,235946,False,2016,0,0.677,0.604,7,-5.32,1,0.0385,0.0612,0.0,0.353,0.297,134.052,pop +Anne-Marie,Alarm,205593,True,2016,57,0.756,0.589,0,-5.093,1,0.232,0.0812,0.0,0.176,0.811,146.928,"pop, Dance/Electronic" +Charlie Puth,We Don't Talk Anymore (feat. 
Selena Gomez),217706,False,2016,79,0.728,0.563,1,-8.053,0,0.134,0.621,0.0,0.179,0.352,100.017,pop +AlunaGeorge,I'm In Control,209425,False,2016,0,0.734,0.847,11,-3.713,0,0.0346,0.355,0.000144,0.154,0.673,104.013,"pop, Dance/Electronic" +Enrique Iglesias,DUELE EL CORAZON (feat. Wisin),199693,False,2016,68,0.716,0.908,8,-3.254,0,0.103,0.0858,0.0,0.135,0.869,91.03,"pop, latin" +MNEK,Never Forget You,213427,False,2015,45,0.583,0.732,11,-5.728,0,0.0457,0.00312,9.86e-06,0.269,0.276,145.992,Dance/Electronic +Mike Perry,The Ocean (feat. Shy Martin),183414,False,2016,70,0.632,0.575,5,-6.478,1,0.029,0.0225,1.81e-06,0.104,0.188,90.037,"pop, Dance/Electronic" +Drake,Hotline Bling,267066,False,2016,0,0.903,0.62,2,-8.094,1,0.0587,0.00347,0.000119,0.0504,0.539,134.96,"hip hop, pop, R&B" +Ariana Grande,Into You,244453,False,2016,3,0.623,0.734,9,-5.948,1,0.107,0.0162,1.75e-06,0.145,0.37,107.853,pop +Shawn Mendes,Treat You Better,187973,False,2017,82,0.444,0.819,10,-4.078,0,0.341,0.106,0.0,0.107,0.747,82.695,pop +Tory Lanez,Say It,237786,True,2016,73,0.546,0.529,11,-10.511,0,0.0583,0.0862,6.5e-06,0.124,0.247,107.331,"hip hop, pop, R&B" +Coldplay,Adventure of a Lifetime,263786,False,2015,76,0.638,0.924,7,-3.887,1,0.036,0.00205,0.000175,0.149,0.53,111.995,"rock, pop" +Kungs,This Girl - Kungs Vs. Cookin' On 3 Burners,195561,False,2016,0,0.79,0.705,0,-4.684,0,0.0383,0.0807,4.81e-05,0.251,0.501,121.969,"pop, Dance/Electronic" +SAYGRACE,You Don't Own Me (feat. G-Eazy),201493,False,2016,69,0.332,0.635,7,-5.653,1,0.0898,0.159,2.79e-06,0.0599,0.261,186.249,"pop, R&B" +X Ambassadors,Renegades,195200,False,2015,75,0.526,0.862,2,-6.003,1,0.0905,0.0144,0.0597,0.229,0.528,90.052,"hip hop, rock, pop" +DNCE,Cake By The Ocean,219146,True,2016,79,0.774,0.753,4,-5.446,0,0.0517,0.152,0.0,0.0371,0.896,119.002,pop +99 Souls,The Girl Is Mine,216613,False,2016,55,0.683,0.943,9,-3.6,1,0.0397,0.00423,0.0972,0.0356,0.706,118.991,Dance/Electronic +Illy,Papercuts (feat. 
Vera Blue),255889,False,2016,51,0.369,0.618,6,-6.304,0,0.249,0.161,0.0,0.257,0.467,191.863,"hip hop, pop, Dance/Electronic" +Kiiara,Gold,225882,False,2015,55,0.6,0.412,8,-9.343,1,0.344,0.615,0.0025,0.134,0.408,113.049,"rock, pop, Dance/Electronic" +Kygo,Stole the Show,222813,False,2016,0,0.678,0.633,8,-6.443,0,0.171,0.151,0.0,0.147,0.478,99.886,"pop, Dance/Electronic" +Fifth Harmony,Work from Home (feat. Ty Dolla $ign),214480,False,2016,75,0.803,0.585,8,-5.861,1,0.0432,0.103,3.94e-06,0.0644,0.593,105.017,pop +Drake,Pop Style,212946,True,2016,63,0.713,0.462,7,-10.027,1,0.123,0.192,2.16e-06,0.105,0.0594,133.053,"hip hop, pop, R&B" +ZAYN,PILLOWTALK,203686,True,2016,59,0.588,0.702,11,-4.271,1,0.0496,0.104,0.0,0.089,0.429,124.909,"pop, Dance/Electronic" +Alessia Cara,Here,199453,False,2016,66,0.376,0.822,0,-3.974,1,0.104,0.0783,0.0,0.0841,0.327,120.493,"pop, R&B" +DJ Snake,Middle,220573,False,2016,0,0.611,0.7,9,-5.331,1,0.0436,0.0199,0.0,0.0549,0.213,104.981,"hip hop, pop, Dance/Electronic" +Sigala,Sweet Lovin' - Radio Edit,202149,False,2015,46,0.683,0.91,10,-1.231,1,0.0515,0.0553,4.78e-06,0.336,0.674,124.977,"pop, Dance/Electronic" +Drake,Jumpman,205879,True,2015,72,0.852,0.553,1,-7.286,1,0.187,0.0559,0.0,0.332,0.656,142.079,"hip hop, pop, R&B" +Charlie Puth,Dangerously,199133,False,2016,62,0.696,0.517,2,-8.379,0,0.0366,0.364,0.0,0.197,0.23,112.291,pop +Nick Jonas,Close,234213,False,2016,69,0.654,0.623,6,-5.273,0,0.082,0.253,0.0,0.144,0.401,123.996,pop +Jonas Blue,Perfect Strangers,196613,False,2016,0,0.742,0.819,1,-5.307,1,0.0487,0.372,0.0,0.277,0.709,117.986,"pop, Dance/Electronic" +Major Lazer,Cold Water (feat. 
Justin Bieber & MØ),185351,False,2016,0,0.608,0.798,6,-5.092,0,0.0432,0.0736,0.0,0.156,0.501,92.943,"hip hop, pop, Dance/Electronic" +The Chainsmokers,Roses,226738,False,2015,73,0.713,0.802,4,-7.055,1,0.0561,0.0435,0.00377,0.309,0.343,100.001,"pop, Dance/Electronic" +Tory Lanez,LUV,228640,True,2016,0,0.688,0.541,11,-8.128,0,0.114,0.0118,0.0,0.123,0.247,95.109,"hip hop, pop, R&B" +P!nk,"Just Like Fire (From the Original Motion Picture ""Alice Through The Looking Glass"")",215413,False,2016,68,0.632,0.702,7,-5.92,1,0.148,0.0114,0.0,0.108,0.523,162.958,pop +Bryson Tiller,Don't,198293,True,2015,78,0.765,0.356,11,-5.556,0,0.195,0.223,0.0,0.0963,0.189,96.991,"hip hop, pop, R&B" +Steve Aoki,Just Hold On,198774,False,2016,63,0.647,0.932,11,-3.515,1,0.0824,0.00383,1.5e-06,0.0574,0.374,114.991,"hip hop, pop, Dance/Electronic" +Clean Bandit,Tears (feat. Louisa Johnson),225914,False,2016,60,0.605,0.77,5,-3.645,0,0.0446,0.0431,0.0,0.159,0.298,130.037,"pop, Dance/Electronic" +Jonas Blue,Fast Car,212424,False,2015,1,0.459,0.587,9,-6.983,1,0.0785,0.453,0.0,0.307,0.581,113.901,"pop, Dance/Electronic" +Flume,Never Be Like You,233337,True,2016,0,0.443,0.558,0,-5.436,1,0.0624,0.441,0.0,0.163,0.248,116.838,Dance/Electronic +Bastille,Good Grief,206493,False,2016,65,0.73,0.758,1,-4.888,1,0.0653,0.147,0.0,0.311,0.877,120.041,"pop, rock" +Sia,The Greatest (feat. Kendrick Lamar),210226,False,2016,68,0.668,0.725,1,-6.127,1,0.266,0.0102,0.000479,0.0561,0.729,191.944,pop +PARTYNEXTDOOR,Not Nice,202661,False,2016,62,0.598,0.496,0,-9.309,1,0.0804,0.0253,0.00132,0.12,0.725,128.06,"hip hop, pop, R&B" +Garrett Nash,"i hate u, i love u (feat. olivia o'brien)",251033,True,2016,77,0.492,0.275,6,-13.4,0,0.3,0.687,0.0,0.101,0.18,92.6,"hip hop, pop" +Hilltop Hoods,1955,239280,False,2016,63,0.766,0.5,11,-7.558,1,0.0695,0.719,5.91e-06,0.304,0.71,84.044,"hip hop, pop" +Fifth Harmony,All In My Head (Flex) (feat. 
Fetty Wap),210573,False,2016,62,0.689,0.791,0,-5.194,0,0.053,0.023,0.0,0.0526,0.755,95.04,pop +Daya,"Sit Still, Look Pretty",202221,False,2016,1,0.657,0.739,2,-4.081,1,0.274,0.141,0.0,0.178,0.543,181.994,"rock, pop, Dance/Electronic" +Kent Jones,Don't Mind,198236,True,2016,63,0.464,0.771,8,-4.503,1,0.336,0.0235,0.0,0.063,0.69,158.777,hip hop +Flume,Say It,262521,True,2016,53,0.581,0.531,3,-6.829,0,0.0322,0.0731,3.27e-06,0.0616,0.273,75.255,Dance/Electronic +Bryson Tiller,Exchange,194613,True,2015,76,0.525,0.433,6,-10.598,1,0.185,0.107,0.0,0.135,0.276,160.108,"hip hop, pop, R&B" +MØ,Final Song,235826,False,2016,66,0.695,0.672,1,-6.109,0,0.0345,0.014,7.95e-05,0.0756,0.245,104.988,"pop, Dance/Electronic" +Beyoncé,Sorry,232560,True,2016,67,0.775,0.598,2,-7.274,1,0.0535,0.00175,4.44e-06,0.253,0.356,129.988,"pop, R&B" +Little Mix,Shout Out to My Ex,246240,False,2017,1,0.773,0.747,5,-4.061,1,0.0889,0.0239,8.23e-06,0.11,0.8,126.014,pop +The Lumineers,Ophelia,160097,False,2016,0,0.664,0.576,5,-6.429,1,0.0286,0.63,0.000198,0.0902,0.621,76.026,"pop, Folk/Acoustic" +Frank Ocean,Pink + White,184516,False,2016,79,0.544,0.552,9,-7.45,1,0.0991,0.67,4.57e-05,0.415,0.554,159.738,"hip hop, pop, R&B" +Drake,Too Good,263373,True,2016,74,0.794,0.653,7,-7.839,1,0.104,0.0489,4.88e-05,0.1,0.397,117.996,"hip hop, pop, R&B" +Ed Sheeran,Shape of You,233712,False,2017,84,0.825,0.652,1,-3.183,0,0.0802,0.581,0.0,0.0931,0.931,95.977,pop +Sean Paul,No Lie,221176,False,2018,84,0.742,0.882,7,-2.862,1,0.117,0.0466,0.0,0.206,0.463,102.04,"hip hop, pop" +Luis Fonsi,Despacito (Featuring Daddy Yankee),228200,False,2017,0,0.66,0.786,2,-4.757,1,0.17,0.209,0.0,0.112,0.846,177.833,"pop, latin" +Sam Smith,Too Good At Goodbyes,201000,False,2017,79,0.681,0.372,5,-8.237,1,0.0432,0.64,0.0,0.169,0.476,91.873,pop +Clean Bandit,Rockabye (feat. Sean Paul & Anne-Marie),251088,False,2016,75,0.72,0.763,9,-4.068,0,0.0523,0.406,0.0,0.18,0.742,101.965,"pop, Dance/Electronic" +Camila Cabello,Havana (feat. 
Young Thug),216896,False,2017,1,0.768,0.517,7,-4.323,0,0.0312,0.186,3.8e-05,0.104,0.418,104.992,pop +Imagine Dragons,Thunder,187146,False,2017,1,0.6,0.81,0,-4.749,1,0.0479,0.00683,0.21,0.155,0.298,167.88,rock +Maroon 5,Don't Wanna Know (feat. Kendrick Lamar),214265,False,2018,68,0.783,0.61,7,-6.124,1,0.0696,0.343,0.0,0.0983,0.418,100.047,pop +The Chainsmokers,Something Just Like This,247160,False,2017,83,0.617,0.635,11,-6.769,0,0.0317,0.0498,1.44e-05,0.164,0.446,103.019,"pop, Dance/Electronic" +Ed Sheeran,Perfect,263400,False,2017,85,0.599,0.448,8,-6.312,1,0.0232,0.163,0.0,0.106,0.168,95.05,pop +MGMT,Little Dark Age,299960,False,2018,81,0.705,0.712,6,-6.156,1,0.0385,0.0102,0.000855,0.1,0.62,97.512,rock +Travis Scott,goosebumps,243836,True,2016,83,0.841,0.728,7,-3.37,1,0.0484,0.0847,0.0,0.149,0.43,130.049,"hip hop, Dance/Electronic" +Billie Eilish,Bored,180933,False,2017,84,0.614,0.318,7,-12.695,1,0.0478,0.896,0.00239,0.0795,0.112,119.959,"pop, Dance/Electronic" +Future,Mask Off,204600,True,2017,79,0.833,0.434,2,-8.795,1,0.431,0.0102,0.0219,0.165,0.281,150.062,"hip hop, pop" +G-Eazy,Him & I,268866,True,2017,0,0.589,0.731,2,-6.343,1,0.0868,0.0534,0.0,0.308,0.191,87.908,"hip hop, pop" +Maroon 5,Cold (feat. 
Future),234308,True,2017,0,0.697,0.716,9,-6.288,0,0.113,0.118,0.0,0.0424,0.506,99.905,pop +Rae Sremmurd,Swang,208120,True,2016,75,0.681,0.314,8,-9.319,1,0.0581,0.2,9.82e-06,0.1,0.166,139.992,"hip hop, pop" +Lil Peep,Save That Shit,231546,True,2017,77,0.534,0.583,2,-8.672,1,0.0288,0.0262,0.0,0.421,0.145,105.997,hip hop +A$AP Ferg,Plain Jane,173600,True,2017,74,0.797,0.844,11,-5.482,1,0.275,0.0651,0.0,0.087,0.52,170.142,"hip hop, pop" +French Montana,Unforgettable,233901,True,2017,82,0.726,0.769,6,-5.043,1,0.123,0.0293,0.0101,0.104,0.733,97.985,"hip hop, pop" +21 Savage,Bank Account,220306,True,2017,74,0.884,0.347,8,-8.227,0,0.35,0.015,7e-06,0.0871,0.376,75.016,hip hop +Axwell /\ Ingrosso,More Than You Know,203000,False,2017,0,0.645,0.741,5,-4.989,0,0.0339,0.0323,0.0,0.29,0.534,123.07,"pop, Dance/Electronic" +ZAYN,Dusk Till Dawn (feat. Sia) - Radio Edit,239000,False,2017,77,0.259,0.437,11,-6.589,0,0.0386,0.102,1.32e-06,0.106,0.0951,180.042,"pop, Dance/Electronic" +J Balvin,Mi Gente,189029,False,2017,53,0.548,0.701,11,-4.862,0,0.0914,0.0178,1.34e-05,0.134,0.309,104.237,latin +Jax Jones,You Don't Know Me - Radio Edit,213946,True,2016,1,0.876,0.669,11,-6.054,0,0.138,0.163,0.0,0.185,0.682,124.007,"hip hop, pop, Dance/Electronic" +Rae Sremmurd,Black Beatles,291893,True,2016,75,0.794,0.632,0,-6.163,1,0.0649,0.142,0.0,0.128,0.355,145.926,"hip hop, pop" +Zara Larsson,Ain't My Fault,224030,False,2017,67,0.576,0.782,6,-4.825,0,0.0296,0.00778,0.0,0.285,0.355,141.153,"pop, Dance/Electronic" +Travis Scott,BUTTERFLY EFFECT,190677,False,2017,0,0.763,0.598,11,-6.865,1,0.0539,0.0714,0.0,0.112,0.182,140.987,"hip hop, Dance/Electronic" +Ayo & Teo,Rolex,238586,False,2017,69,0.804,0.886,1,-2.512,1,0.04,0.0837,0.0,0.266,0.789,144.946,"hip hop, pop" +Lil Uzi Vert,XO Tour Llif3,182706,True,2017,81,0.732,0.75,11,-6.366,0,0.231,0.00264,0.0,0.109,0.401,155.096,hip hop +Imagine Dragons,Whatever It Takes,201240,False,2017,80,0.672,0.655,10,-5.021,0,0.0311,0.0362,0.0,0.117,0.556,134.945,rock +Rob 
$tone,Chill Bill (feat. J. Davi$ & Spooks),177184,True,2016,76,0.886,0.427,6,-10.028,1,0.145,0.0312,0.00099,0.0906,0.23,108.034,hip hop +Jason Derulo,Swalla (feat. Nicki Minaj & Ty Dolla $ign),216408,True,2017,75,0.696,0.817,1,-3.862,1,0.109,0.075,0.0,0.187,0.782,98.064,"hip hop, pop" +G-Eazy,No Limit,245386,True,2017,0,0.838,0.771,1,-3.791,1,0.244,0.0117,0.0,0.0853,0.405,175.957,"hip hop, pop" +Playboi Carti,Magnolia,181812,True,2017,77,0.791,0.582,11,-7.323,0,0.286,0.0114,0.0,0.35,0.443,162.991,hip hop +Chord Overstreet,Hold On,198853,False,2017,80,0.618,0.443,2,-9.681,1,0.0526,0.469,0.0,0.0829,0.167,119.949,"pop, Folk/Acoustic" +Jax Jones,Breathe,207629,False,2017,58,0.722,0.744,11,-5.52,0,0.0363,0.0234,0.000157,0.143,0.686,125.985,"hip hop, pop, Dance/Electronic" +Imagine Dragons,Believer,204346,False,2017,1,0.779,0.787,10,-4.305,0,0.108,0.0524,0.0,0.14,0.708,124.982,rock +Natti Natasha,Criminal,232549,False,2017,74,0.814,0.813,2,-3.023,0,0.0561,0.03,9.33e-05,0.255,0.839,79.997,"pop, latin" +Kendrick Lamar,HUMBLE.,177000,True,2017,0,0.906,0.625,1,-6.779,0,0.0903,0.000243,3.23e-05,0.0975,0.423,150.018,hip hop +Selena Gomez,Fetish (feat. 
Gucci Mane),186112,False,2017,69,0.708,0.618,2,-4.424,1,0.0592,0.0204,6.81e-06,0.062,0.265,123.013,pop +Charlie Puth,Attention,211475,False,2017,0,0.774,0.626,3,-4.432,0,0.0432,0.0969,3.12e-05,0.0848,0.777,100.041,pop +Big Shaq,Man's Not Hot,186026,False,2017,62,0.905,0.884,0,-4.076,1,0.236,0.111,8.31e-05,0.107,0.588,135.048,"pop, Dance/Electronic" +Anne-Marie,Ciao Adios,200104,False,2017,57,0.698,0.882,4,-3.078,0,0.0863,0.127,0.0,0.15,0.445,106.083,"pop, Dance/Electronic" +Rag'n'Bone Man,Human,200186,False,2017,76,0.602,0.707,9,-4.097,1,0.302,0.393,0.0,0.165,0.554,75.087,R&B +blackbear,do re mi,212027,True,2017,4,0.745,0.593,8,-6.35,1,0.0526,0.00522,5.25e-06,0.123,0.17,111.002,"hip hop, pop, Dance/Electronic" +Lil Pump,Gucci Gang,124055,True,2017,64,0.936,0.523,5,-6.71,1,0.0597,0.239,0.0,0.117,0.699,119.889,hip hop +Taylor Swift,Look What You Made Me Do,211853,False,2017,76,0.766,0.709,9,-6.471,0,0.123,0.204,1.41e-05,0.126,0.506,128.07,pop +Enrique Iglesias,SUBEME LA RADIO (feat. Descemer Bueno & Zion & Lennox),207680,False,2017,70,0.688,0.822,0,-3.304,1,0.0537,0.0642,0.0,0.241,0.66,91.011,"pop, latin" +Kodak Black,Tunnel Vision,268186,True,2017,51,0.497,0.489,11,-7.724,0,0.294,0.0576,9.91e-05,0.122,0.231,171.853,hip hop +Kendrick Lamar,DNA.,185946,True,2017,0,0.636,0.517,0,-6.759,1,0.36,0.00402,0.0,0.0874,0.394,139.928,hip hop +A Boogie Wit da Hoodie,Drowning (feat. Kodak Black),209269,True,2017,0,0.839,0.81,5,-5.274,0,0.0568,0.501,0.0,0.117,0.814,129.014,hip hop +XXXTENTACION,Jocelyn Flores,119133,True,2017,83,0.872,0.391,0,-9.144,0,0.242,0.469,4.13e-06,0.297,0.437,134.021,hip hop +Migos,Bad and Boujee (feat. Lil Uzi Vert),343150,True,2017,72,0.926,0.666,11,-5.314,1,0.244,0.0611,0.0,0.123,0.168,127.079,"hip hop, pop" +Yo Gotti,Rake It Up (feat. 
Nicki Minaj),276333,True,2017,67,0.91,0.444,1,-8.126,0,0.344,0.022,0.0,0.137,0.53,149.953,"hip hop, pop" +Martin Garrix,Scared to Be Lonely,220883,False,2017,77,0.584,0.54,1,-7.786,0,0.0576,0.0895,0.0,0.261,0.195,137.972,"pop, Dance/Electronic" +Jonas Blue,Mama,181614,False,2017,51,0.739,0.792,11,-4.256,0,0.0418,0.091,0.0,0.0516,0.547,104.016,"pop, Dance/Electronic" +Martin Jensen,Solo Dance,174933,False,2016,0,0.744,0.836,6,-2.396,0,0.0507,0.0435,0.0,0.194,0.36,114.965,"pop, Dance/Electronic" +Taylor Swift,...Ready For It?,208186,False,2017,73,0.613,0.764,2,-6.509,1,0.136,0.0527,0.0,0.197,0.417,160.015,pop +David Guetta,2U (feat. Justin Bieber),194896,False,2017,67,0.548,0.65,8,-5.827,0,0.0591,0.219,0.0,0.225,0.557,144.937,"hip hop, pop, Dance/Electronic" +Migos,Slippery (feat. Gucci Mane),304041,True,2017,68,0.92,0.675,1,-5.661,0,0.263,0.307,0.0,0.104,0.749,141.967,"hip hop, pop" +Shawn Mendes,There's Nothing Holdin' Me Back,199440,False,2017,0,0.857,0.8,2,-4.035,1,0.0583,0.381,0.0,0.0913,0.966,121.996,pop +G-Eazy,Good Life (with G-Eazy & Kehlani),225520,False,2017,63,0.572,0.778,1,-5.208,1,0.233,0.00638,0.0,0.0563,0.54,168.073,"hip hop, pop" +The Vamps,All Night,197640,False,2017,73,0.538,0.804,8,-5.194,1,0.0358,0.0041,0.0,0.33,0.507,144.992,pop +Dua Lipa,New Rules,212000,False,2017,61,0.763,0.72,9,-5.181,0,0.0691,0.00261,1.11e-05,0.114,0.592,116.01,pop +Cardi B,Bodak Yellow,223962,True,2017,59,0.929,0.723,11,-5.792,0,0.109,0.0672,0.0,0.346,0.458,125.022,"hip hop, pop" +Rag'n'Bone Man,Skin,239626,False,2017,67,0.564,0.745,0,-7.733,1,0.31,0.265,0.0,0.147,0.351,170.661,R&B +Zara Larsson,I Would Like,226720,False,2017,57,0.486,0.713,2,-3.949,0,0.0524,0.0853,0.0,0.0839,0.297,121.028,"pop, Dance/Electronic" +DJ Khaled,Wild Thoughts (feat. 
Rihanna & Bryson Tiller),204173,True,2017,72,0.671,0.672,0,-3.094,0,0.0688,0.0329,0.0,0.118,0.632,97.979,"hip hop, pop" +Post Malone,Congratulations,220293,True,2016,81,0.63,0.804,6,-4.183,1,0.0363,0.215,0.0,0.253,0.492,123.146,hip hop +Avicii,Without You (feat. Sandro Cavazza),181672,True,2017,77,0.662,0.858,2,-4.844,1,0.0428,0.00163,0.0,0.0456,0.295,133.993,"pop, Dance/Electronic" +Clean Bandit,Symphony (feat. Zara Larsson),214866,False,2017,0,0.718,0.609,0,-4.699,0,0.043,0.281,3.29e-05,0.234,0.497,122.95,"pop, Dance/Electronic" +Katy Perry,Chained To The Rhythm,237733,False,2017,69,0.562,0.8,0,-5.404,1,0.112,0.0814,0.0,0.199,0.471,95.029,pop +Zay Hilfigerrr,Juju on That Beat (TZ Anthem),144244,False,2016,57,0.807,0.887,1,-3.892,1,0.275,0.00381,0.0,0.391,0.78,160.517,set() +Calvin Harris,"Feels (feat. Pharrell Williams, Katy Perry & Big Sean)",223413,True,2017,79,0.893,0.745,11,-3.105,0,0.0571,0.0642,0.0,0.0943,0.872,101.018,"hip hop, pop, Dance/Electronic" +Migos,T-Shirt,242407,True,2017,64,0.865,0.687,10,-3.744,0,0.217,0.242,0.0,0.158,0.486,139.023,"hip hop, pop" +OneRepublic,Rich Love (with Seeb),201256,False,2017,64,0.401,0.872,1,-3.641,0,0.314,0.167,0.0,0.362,0.472,104.592,pop +Maggie Lindemann,Pretty Girl - Cheat Codes X CADE Remix,193613,True,2017,61,0.703,0.868,7,-4.661,0,0.0291,0.15,0.132,0.104,0.733,121.03,"rock, pop" +Macklemore,Glorious (feat. 
Skylar Grey),220454,True,2017,74,0.731,0.794,0,-5.126,0,0.0522,0.0323,2.59e-05,0.112,0.356,139.994,"hip hop, pop" +Hayden James,NUMB,217296,True,2019,47,0.617,0.558,10,-7.046,0,0.0431,0.184,1.03e-06,0.0911,0.4,147.932,"pop, Dance/Electronic" +Liam Payne,Strip That Down,204502,False,2017,1,0.869,0.485,6,-5.595,1,0.0545,0.246,0.0,0.0765,0.527,106.028,pop +Drake,Passionfruit,298940,True,2017,0,0.809,0.463,11,-11.377,1,0.0396,0.256,0.085,0.109,0.364,111.98,"hip hop, pop, R&B" +Justin Bieber,Friends (with BloodPop®),189466,False,2017,0,0.744,0.739,8,-5.35,1,0.0387,0.00459,0.0,0.306,0.649,104.99,pop +ZAYN,"I Don’t Wanna Live Forever (Fifty Shades Darker) - From ""Fifty Shades Darker (Original Motion Picture Soundtrack)""",245200,False,2016,0,0.735,0.451,0,-8.374,1,0.0585,0.0631,1.3e-05,0.325,0.0862,117.973,"pop, Dance/Electronic" +Alessia Cara,Scars To Your Beautiful,230226,False,2016,76,0.319,0.739,0,-5.74,1,0.272,0.0285,0.0,0.111,0.449,194.169,"pop, R&B" +2 Chainz,It's A Vibe,210200,True,2017,71,0.822,0.502,7,-7.38,1,0.148,0.0312,0.000887,0.114,0.525,73.003,"hip hop, pop" +Starley,Call on Me - Ryan Riback Remix,222040,False,2016,48,0.67,0.838,0,-4.031,1,0.0362,0.0605,0.000611,0.159,0.716,105.0,"pop, Dance/Electronic" +AJR,Weak,201160,False,2017,0,0.673,0.637,5,-4.518,1,0.0429,0.137,0.0,0.184,0.678,123.98,rock +Lana Del Rey,Lust for Life (with The Weeknd),264011,False,2017,0,0.499,0.676,0,-8.618,0,0.0582,0.588,0.00484,0.358,0.295,99.979,pop +CNCO,Reggaetón Lento (Remix),188786,False,2017,0,0.623,0.909,4,-3.079,0,0.0758,0.158,0.0,0.0972,0.651,93.984,"pop, latin" +The Vamps,Middle Of The Night,174600,False,2017,61,0.588,0.749,11,-4.323,0,0.058,0.0037,0.0,0.0813,0.397,130.093,pop +The Weeknd,I Feel It Coming,269186,False,2016,78,0.773,0.819,0,-5.946,0,0.118,0.428,0.0,0.0679,0.585,92.987,"pop, R&B" +The Chainsmokers,Paris,221506,False,2017,73,0.653,0.658,2,-6.428,1,0.0304,0.0215,1.66e-06,0.0939,0.219,99.99,"pop, Dance/Electronic" +Big Sean,Bounce 
Back,222360,True,2017,71,0.78,0.575,1,-5.628,0,0.139,0.106,0.0,0.129,0.273,81.502,"hip hop, pop" +Shawn Mendes,Mercy,208733,False,2017,0,0.561,0.674,4,-4.882,0,0.0818,0.118,0.0,0.111,0.383,148.127,pop +Avicii,Lonely Together (feat. Rita Ora),181812,False,2017,53,0.655,0.666,5,-5.309,1,0.0612,0.134,2.12e-05,0.0683,0.272,102.977,"pop, Dance/Electronic" +Cheat Codes,No Promises (feat. Demi Lovato),223503,False,2017,68,0.741,0.667,10,-5.445,1,0.134,0.0575,0.0,0.106,0.595,112.956,"pop, Dance/Electronic" +Bruno Mars,24K Magic,225983,False,2016,78,0.818,0.803,1,-4.282,1,0.0797,0.034,0.0,0.153,0.632,106.97,pop +Lorde,Homemade Dynamite - REMIX,214254,True,2017,8,0.781,0.548,0,-4.997,0,0.0764,0.229,0.0,0.127,0.175,106.996,"pop, Dance/Electronic" +Kygo,It Ain't Me (with Selena Gomez),220780,False,2017,75,0.64,0.533,0,-6.596,1,0.0706,0.119,0.0,0.0864,0.515,99.968,"pop, Dance/Electronic" +Drake,God's Plan,198973,True,2018,81,0.754,0.449,7,-9.211,1,0.109,0.0332,8.29e-05,0.552,0.357,77.169,"hip hop, pop, R&B" +Post Malone,rockstar (feat. 
21 Savage),218146,True,2018,83,0.585,0.52,5,-6.136,0,0.0712,0.124,7.01e-05,0.131,0.129,159.801,hip hop +Offset,Ric Flair Drip (with Metro Boomin),172800,True,2017,80,0.88,0.428,9,-8.28,1,0.206,0.149,5.05e-05,0.114,0.333,100.007,"hip hop, pop" +G-Eazy,Him & I (with Halsey),268866,True,2017,75,0.589,0.731,2,-6.343,1,0.0868,0.0534,0.0,0.308,0.191,87.908,"hip hop, pop" +Dynoro,In My Mind,184560,False,2018,78,0.694,0.77,6,-5.335,1,0.149,0.176,1.1e-05,0.118,0.163,125.905,"pop, Dance/Electronic" +Juice WRLD,Lucid Dreams,239835,True,2018,84,0.511,0.566,6,-7.23,0,0.2,0.349,0.0,0.34,0.218,83.903,hip hop +Zedd,The Middle,184732,False,2018,80,0.753,0.657,7,-3.061,1,0.0449,0.171,0.0,0.112,0.437,107.01,"hip hop, pop, Dance/Electronic" +Marshmello,FRIENDS,202620,True,2018,78,0.626,0.88,9,-2.384,0,0.0504,0.205,0.0,0.128,0.534,95.079,"pop, Dance/Electronic" +Cardi B,I Like It,253390,True,2018,79,0.816,0.726,5,-3.998,0,0.129,0.099,0.0,0.372,0.65,136.048,"hip hop, pop" +XXXTENTACION,Moonlight,135090,True,2018,82,0.921,0.537,9,-5.723,0,0.0804,0.556,0.00404,0.102,0.711,128.009,hip hop +Ariana Grande,no tears left to cry,205920,False,2018,77,0.699,0.713,9,-5.507,0,0.0594,0.04,3.11e-06,0.294,0.354,121.993,pop +Sheck Wes,Mo Bamba,183906,True,2018,77,0.729,0.625,4,-5.266,1,0.0315,0.194,0.00986,0.248,0.261,146.034,hip hop +Calvin Harris,One Kiss (with Dua Lipa),214846,False,2018,81,0.791,0.862,9,-3.24,0,0.11,0.037,2.19e-05,0.0814,0.592,123.994,"hip hop, pop, Dance/Electronic" +Tyga,Taste (feat. 
Offset),232959,True,2018,73,0.884,0.559,0,-7.442,1,0.12,0.0236,0.0,0.101,0.342,97.994,"hip hop, pop" +Travis Scott,SICKO MODE,312820,True,2018,81,0.834,0.73,8,-3.714,1,0.222,0.00513,0.0,0.124,0.446,155.008,"hip hop, Dance/Electronic" +Imagine Dragons,Natural,189466,False,2018,80,0.704,0.611,2,-6.112,1,0.0409,0.217,0.0,0.0812,0.22,100.0,rock +DJ Snake,"Taki Taki (with Selena Gomez, Ozuna & Cardi B)",212500,True,2018,76,0.842,0.801,8,-4.167,0,0.228,0.157,4.82e-06,0.0642,0.617,95.881,"hip hop, pop, Dance/Electronic" +Marshmello,Spotlight,177600,False,2018,71,0.546,0.822,7,-4.483,1,0.151,0.0145,0.0,0.742,0.615,150.06,"pop, Dance/Electronic" +The Weeknd,Pray For Me (with Kendrick Lamar),211440,True,2018,69,0.735,0.677,2,-4.979,1,0.093,0.0762,2.17e-05,0.111,0.188,100.584,"pop, R&B" +Billie Eilish,lovely (with Khalid),200185,False,2018,86,0.351,0.296,4,-10.109,0,0.0333,0.934,0.0,0.095,0.12,115.284,"pop, Dance/Electronic" +Migos,Walk It Talk It,276147,True,2018,75,0.907,0.633,2,-5.145,1,0.184,0.0876,2.6e-06,0.106,0.395,145.914,"hip hop, pop" +Clean Bandit,Solo (feat. 
Demi Lovato),222653,False,2018,71,0.737,0.636,11,-4.546,0,0.0437,0.0441,6.66e-05,0.35,0.565,105.005,"pop, Dance/Electronic" +Drake,Nonstop,238614,True,2018,77,0.912,0.412,7,-8.074,1,0.123,0.0165,0.0126,0.104,0.423,154.983,"hip hop, pop, R&B" +6ix9ine,FEFE,179404,True,2018,42,0.931,0.387,1,-9.127,1,0.412,0.088,0.0,0.136,0.376,125.978,hip hop +The Weeknd,Call Out My Name,228373,False,2018,80,0.461,0.593,1,-4.954,1,0.0356,0.17,0.0,0.307,0.175,134.17,"pop, R&B" +Nio Garcia,Te Boté - Remix,417920,True,2018,76,0.903,0.675,11,-3.445,0,0.214,0.542,1.28e-05,0.0595,0.442,96.507,latin +Migos,MotorSport,303076,True,2018,72,0.904,0.518,2,-5.32,1,0.183,0.0305,0.0,0.325,0.188,137.996,"hip hop, pop" +Nicky Jam,X,172854,False,2019,74,0.594,0.749,9,-5.298,0,0.056,0.0338,0.00106,0.3,0.694,179.968,latin +XXXTENTACION,SAD!,166605,True,2018,82,0.74,0.613,8,-4.88,1,0.145,0.258,0.00372,0.123,0.473,75.023,hip hop +Becky G,Sin Pijama,188560,False,2018,69,0.791,0.745,11,-3.695,0,0.0464,0.354,2.93e-05,0.104,0.82,94.014,"pop, latin" +Machine Gun Kelly,"Home (with Machine Gun Kelly, X Ambassadors & Bebe Rexha)",202804,False,2017,71,0.653,0.718,3,-5.232,0,0.213,0.00413,0.0,0.0537,0.216,82.034,"hip hop, pop" +6ix9ine,GUMMO,157643,True,2018,63,0.66,0.775,2,-4.926,1,0.172,0.0553,0.0,0.129,0.635,157.036,hip hop +Jax Jones,Breathe,207629,False,2017,58,0.722,0.744,11,-5.52,0,0.0363,0.0234,0.000157,0.143,0.686,125.985,"hip hop, pop, Dance/Electronic" +The Chainsmokers,Sick Boy,193200,False,2018,69,0.663,0.577,11,-7.518,0,0.0531,0.109,0.0,0.12,0.454,89.996,"pop, Dance/Electronic" +Jay Rock,"King's Dead (with Kendrick Lamar, Future & James Blake)",229670,True,2018,71,0.645,0.705,1,-5.008,1,0.299,0.000813,0.0,0.128,0.376,137.133,"hip hop, pop" +Rich The Kid,Plug Walk,175229,True,2018,72,0.876,0.519,11,-6.531,1,0.143,0.202,0.0,0.108,0.158,94.981,"hip hop, pop" +Kendrick Lamar,All The Stars (with SZA),232186,True,2018,80,0.698,0.633,8,-4.946,1,0.0597,0.0605,0.000194,0.0926,0.552,96.924,hip hop 
+Logic,Everyday,204746,True,2018,69,0.667,0.741,1,-4.099,1,0.0378,0.0425,0.0,0.0761,0.422,149.908,"hip hop, pop" +Jason Derulo,Tip Toe (feat. French Montana),187521,False,2017,64,0.845,0.709,10,-4.547,0,0.0714,0.0233,0.0,0.094,0.62,98.062,"hip hop, pop" +Ariana Grande,God is a woman,197546,True,2018,78,0.602,0.658,1,-5.934,1,0.0558,0.0233,6e-05,0.237,0.268,145.031,pop +Rae Sremmurd,Powerglide (feat. Juicy J) - From SR3MM,332300,True,2018,66,0.713,0.831,1,-4.75,0,0.15,0.0168,0.0,0.118,0.584,173.948,"hip hop, pop" +EO,German,170825,False,2018,67,0.862,0.583,8,-6.26,0,0.0654,0.811,1.86e-05,0.191,0.852,103.019,set() +Dennis Lloyd,Nevermind,156600,False,2017,74,0.6,0.688,5,-8.339,0,0.201,0.159,1.29e-05,0.409,0.0793,99.977,pop +girl in red,we fell in love in october,184153,False,2018,82,0.566,0.366,7,-12.808,1,0.028,0.113,0.181,0.155,0.237,129.959,"pop, rock, Dance/Electronic" +Selena Gomez,Wolves,197993,False,2017,78,0.724,0.804,11,-4.614,0,0.0448,0.124,0.0,0.204,0.306,124.987,pop +Tiësto,Jackie Chan,215759,True,2018,72,0.747,0.834,3,-2.867,0,0.045,0.374,0.0,0.0586,0.687,128.005,"pop, Dance/Electronic" +Tom Walker,Leave a Light On,185863,False,2017,70,0.586,0.624,5,-5.946,1,0.113,0.0153,1.78e-06,0.133,0.267,68.976,"rock, pop" +Sofía Reyes,"1, 2, 3 (feat. Jason Derulo & De La Ghetto)",201526,False,2018,65,0.792,0.895,1,-3.112,0,0.0589,0.165,0.0,0.0501,0.794,94.968,"pop, latin" +Marshmello,Silence,180822,False,2017,80,0.52,0.761,4,-3.093,1,0.0853,0.256,4.96e-06,0.17,0.286,141.971,"pop, Dance/Electronic" +5 Seconds of Summer,Youngblood,203417,False,2018,69,0.596,0.854,7,-5.114,0,0.463,0.0169,0.0,0.124,0.152,120.274,pop +Loud Luxury,Body (feat. 
brando),163216,False,2017,0,0.752,0.764,1,-4.399,1,0.038,0.0476,9.44e-05,0.0543,0.582,121.958,"pop, Dance/Electronic" +Liam Payne,For You (Fifty Shades Freed) (& Rita Ora),245453,False,2018,62,0.541,0.787,2,-4.618,0,0.0331,0.0167,0.0,0.157,0.2,113.0,pop +NF,Let You Down,212120,False,2017,79,0.662,0.714,5,-5.68,0,0.121,0.312,0.0,0.179,0.464,147.997,"hip hop, pop" +XXXTENTACION,changes,121886,False,2018,79,0.669,0.308,11,-10.068,1,0.029,0.883,0.0,0.0984,0.52,64.934,hip hop +Maluma,Corazón (feat. Nego do Borel),184720,False,2018,69,0.722,0.738,9,-6.073,0,0.247,0.328,1.47e-05,0.198,0.748,198.075,latin +Cardi B,Bodak Yellow,223712,True,2018,72,0.926,0.703,11,-6.337,0,0.103,0.0659,0.0,0.231,0.485,125.022,"hip hop, pop" +Lil Baby,Yes Indeed,142273,True,2018,79,0.963,0.346,5,-9.309,0,0.53,0.0355,0.0,0.108,0.562,119.957,hip hop +Jonas Blue,Rise,194407,False,2018,70,0.687,0.785,1,-4.65,1,0.0333,0.327,0.0,0.203,0.655,106.046,"pop, Dance/Electronic" +David Guetta,Flames,194680,False,2018,59,0.631,0.649,5,-5.892,0,0.0385,0.0817,3.8e-06,0.0934,0.421,93.95,"hip hop, pop, Dance/Electronic" +Ozuna,Vaina Loca,176133,False,2018,72,0.754,0.805,6,-4.249,1,0.0752,0.315,0.0,0.203,0.555,93.982,latin +Charlie Puth,How Long,200853,False,2018,72,0.845,0.561,1,-5.253,0,0.0778,0.211,3.49e-06,0.0383,0.811,109.974,pop +Post Malone,Better Now,231266,True,2018,80,0.68,0.578,10,-5.804,1,0.04,0.331,0.0,0.135,0.341,145.038,hip hop +BlocBoy JB,Look Alive (feat. Drake),181263,True,2018,73,0.922,0.581,10,-7.495,1,0.27,0.00104,5.86e-05,0.105,0.595,140.022,"hip hop, pop" +Eminem,River (feat. Ed Sheeran),221013,True,2017,72,0.748,0.749,8,-5.916,0,0.516,0.142,0.0,0.0713,0.659,90.09,hip hop +Marshmello,Happier,214289,False,2018,82,0.687,0.792,5,-2.749,1,0.0452,0.191,0.0,0.167,0.671,100.015,"pop, Dance/Electronic" +Dean Lewis,Be Alright,196373,True,2018,79,0.553,0.586,11,-6.319,1,0.0362,0.697,0.0,0.0813,0.443,126.684,pop +Charlie Puth,Done for Me (feat. 
Kehlani),180493,False,2018,67,0.856,0.632,6,-3.692,0,0.074,0.193,0.0,0.0688,0.697,112.009,pop +Migos,Stir Fry,190288,True,2018,72,0.817,0.816,2,-5.402,1,0.269,0.00263,0.0,0.159,0.508,181.982,"hip hop, pop" +YG,"BIG BANK (feat. 2 Chainz, Big Sean, Nicki Minaj)",237240,True,2018,68,0.745,0.346,1,-7.709,1,0.331,0.00552,0.0,0.0881,0.112,203.911,"hip hop, pop" +Drake,In My Feelings,217925,True,2018,75,0.835,0.626,1,-5.833,1,0.125,0.0589,6e-05,0.396,0.35,91.03,"hip hop, pop, R&B" +B Young,Jumanji,173153,False,2018,60,0.791,0.473,10,-9.86,0,0.25,0.279,0.0,0.0959,0.603,95.948,"hip hop, Dance/Electronic" +Charlie Puth,The Way I Am,186080,False,2018,64,0.755,0.769,10,-5.658,0,0.186,0.314,2.38e-06,0.0628,0.642,114.966,pop +Reik,Me Niego (feat. Ozuna & Wisin),221653,False,2018,1,0.777,0.779,0,-4.449,0,0.0972,0.0543,0.0,0.636,0.768,94.023,"pop, latin" +benny blanco,Eastside (with Halsey & Khalid),173799,False,2018,75,0.56,0.68,6,-7.648,0,0.321,0.555,0.0,0.116,0.319,89.391,"hip hop, pop" +Luis Fonsi,Échame La Culpa,173720,False,2017,62,0.733,0.892,0,-3.641,1,0.0417,0.0376,0.0,0.137,0.675,95.989,"pop, latin" +MK,17,196489,False,2017,65,0.703,0.832,0,-7.202,0,0.0689,0.00149,0.127,0.0696,0.667,122.029,"pop, Dance/Electronic" +Rita Ora,Anywhere,215064,False,2017,63,0.628,0.797,11,-3.953,0,0.0596,0.0364,0.0,0.104,0.321,106.93,"hip hop, pop, Dance/Electronic" +Banx & Ranx,Answerphone (feat. Yxng Bane),190920,False,2018,56,0.671,0.834,1,-3.928,0,0.0885,0.0387,0.0,0.454,0.454,113.021,Dance/Electronic +Ella Mai,Trip,213993,False,2018,72,0.477,0.61,11,-5.628,0,0.144,0.225,0.0,0.107,0.358,79.882,"hip hop, pop, R&B" +Daddy Yankee,Dura,200480,False,2018,71,0.783,0.84,1,-3.416,1,0.051,0.174,0.0,0.412,0.839,95.001,latin +DJ Khaled,No Brainer,260000,True,2018,67,0.552,0.76,0,-4.706,1,0.342,0.0733,0.0,0.0865,0.639,135.702,"hip hop, pop" +Anne-Marie,2002,186986,False,2018,80,0.697,0.683,1,-2.881,0,0.117,0.0372,0.0,0.137,0.603,96.133,"pop, Dance/Electronic" +Bruno Mars,Finesse - Remix; feat. 
Cardi B,217288,False,2017,74,0.704,0.859,5,-4.877,0,0.0996,0.0185,0.0,0.0215,0.926,105.115,pop +FINNEAS,Let's Fall in Love for the Night,190348,True,2018,71,0.737,0.408,5,-7.941,1,0.104,0.802,0.0,0.171,0.374,127.921,"rock, pop, Dance/Electronic" +Hugh Jackman,The Greatest Show,302146,False,2017,72,0.417,0.824,11,-7.36,0,0.105,0.000239,0.0545,0.0725,0.4,157.92,easy listening +Halsey,Bad At Love,181279,False,2017,72,0.675,0.751,0,-3.539,1,0.0296,0.0604,0.0,0.0893,0.612,118.384,"pop, Dance/Electronic" +Drake,I'm Upset,214466,True,2018,68,0.899,0.586,11,-7.866,0,0.343,0.279,0.0,0.0836,0.492,150.002,"hip hop, pop, R&B" +Camila Cabello,Never Be the Same,226973,False,2018,74,0.637,0.713,0,-4.333,1,0.0747,0.181,0.000637,0.137,0.243,129.923,pop +M.O,Bad Vibe,214253,False,2018,54,0.813,0.701,10,-4.428,1,0.0449,0.178,0.0,0.0962,0.758,110.009,"pop, Dance/Electronic" +Dua Lipa,IDGAF,217946,True,2017,78,0.836,0.544,7,-5.975,1,0.0943,0.0403,0.0,0.0824,0.51,97.028,pop +Cashmere Cat,Miss You (with Major Lazer & Tory Lanez),186231,False,2018,56,0.747,0.641,1,-4.502,1,0.0925,0.25,0.00107,0.106,0.453,100.028,"pop, rock" +Bazzi,Mine,131064,True,2018,75,0.71,0.789,4,-3.874,1,0.0722,0.0161,2.77e-06,0.451,0.717,142.929,"pop, Dance/Electronic" +Lil Dicky,Freaky Friday (feat. Chris Brown),216631,True,2018,70,0.755,0.599,8,-5.042,1,0.224,0.147,0.0,0.109,0.755,133.123,"hip hop, pop" +Lauv,I Like Me Better,197436,False,2018,81,0.752,0.505,9,-7.621,1,0.253,0.535,2.55e-06,0.104,0.419,91.97,"pop, Dance/Electronic" +The Chainsmokers,This Feeling,197946,False,2018,73,0.575,0.571,1,-7.906,1,0.0439,0.0558,0.0,0.0912,0.449,105.049,"pop, Dance/Electronic" +Maroon 5,Girls Like You (feat. 
Cardi B) - Cardi B Version,235545,True,2018,73,0.851,0.541,0,-6.825,1,0.0505,0.568,0.0,0.13,0.448,124.959,pop +Khalid,Love Lies (with Normani),201707,False,2018,73,0.708,0.648,6,-5.626,1,0.0449,0.0956,0.0,0.134,0.338,143.955,"pop, R&B" +Drake,Nice For What,210746,True,2018,77,0.585,0.909,8,-6.474,1,0.0707,0.0891,9.7e-05,0.119,0.758,93.372,"hip hop, pop, R&B" +Kendrick Lamar,LOVE. FEAT. ZACARI.,213400,True,2017,80,0.8,0.585,10,-7.343,1,0.0924,0.264,0.0,0.153,0.779,126.058,hip hop +Post Malone,Psycho (feat. Ty Dolla $ign),221440,True,2018,77,0.75,0.56,8,-8.094,1,0.105,0.546,0.0,0.111,0.459,140.06,hip hop +Lil Nas X,Old Town Road - Remix,157066,False,2019,79,0.878,0.619,6,-5.56,1,0.102,0.0533,0.0,0.113,0.639,136.041,"hip hop, pop" +Billie Eilish,bad guy,194087,False,2019,83,0.701,0.425,7,-10.965,1,0.375,0.328,0.13,0.1,0.562,135.128,"pop, Dance/Electronic" +Shawn Mendes,Señorita,190799,False,2019,78,0.759,0.548,9,-6.049,0,0.029,0.0392,0.0,0.0828,0.749,116.967,pop +Ariana Grande,7 rings,178626,True,2019,83,0.778,0.317,1,-10.732,0,0.334,0.592,0.0,0.0881,0.327,140.048,pop +Post Malone,Sunflower - Spider-Man: Into the Spider-Verse,157560,False,2019,79,0.755,0.522,2,-4.368,1,0.0575,0.533,0.0,0.0685,0.925,89.96,hip hop +Gesaffelstein,Lost in the Fire (feat. The Weeknd),202093,True,2019,84,0.658,0.671,2,-12.21,1,0.0363,0.0933,0.000927,0.115,0.166,100.966,Dance/Electronic +Halsey,Without Me,201660,True,2018,77,0.752,0.488,6,-7.05,1,0.0705,0.297,9.11e-06,0.0936,0.533,136.041,"pop, Dance/Electronic" +Regard,Ride It,157605,False,2019,81,0.88,0.751,7,-4.258,0,0.0874,0.177,6.43e-05,0.106,0.884,117.948,"pop, Dance/Electronic" +Lady Gaga,Shallow,215733,False,2018,82,0.572,0.385,7,-6.362,1,0.0308,0.371,0.0,0.231,0.323,95.799,pop +Mark Ronson,Nothing Breaks Like a Heart (feat. 
Miley Cyrus),217466,False,2018,79,0.601,0.794,7,-5.844,0,0.0671,0.00987,1.36e-06,0.388,0.244,114.066,pop +Lewis Capaldi,Someone You Loved,182160,False,2019,84,0.501,0.405,1,-5.679,1,0.0319,0.751,0.0,0.105,0.446,109.891,pop +Jax Jones,All Day And Night,169303,False,2019,60,0.585,0.782,1,-4.101,0,0.0897,0.269,0.0,0.156,0.521,121.908,"hip hop, pop, Dance/Electronic" +Mustard,Pure Water (with Migos),192470,True,2019,75,0.682,0.559,0,-5.545,1,0.127,0.174,0.0,0.344,0.137,202.015,"hip hop, pop" +NLE Choppa,Shotta Flow (feat. Blueface) [Remix],176631,True,2019,0,0.894,0.511,2,-4.768,1,0.42,0.0251,0.0,0.14,0.568,120.08,hip hop +Kodak Black,ZEZE (feat. Travis Scott & Offset),228759,True,2018,77,0.861,0.603,8,-5.788,0,0.176,0.0521,0.0,0.0924,0.504,98.043,hip hop +Daddy Yankee,Con Calma,193226,False,2019,79,0.737,0.86,8,-2.652,0,0.0593,0.11,1.94e-06,0.0574,0.656,93.989,latin +Alec Benjamin,Let Me Down Slowly,169353,False,2018,82,0.652,0.557,1,-5.714,0,0.0318,0.74,0.0,0.124,0.483,150.073,"rock, pop, Dance/Electronic" +BLACKPINK,Kill This Love,189052,False,2019,1,0.738,0.861,2,-4.141,1,0.237,0.318,0.00182,0.325,0.58,131.98,pop +Offset,Clout (feat. Cardi B),205803,True,2019,68,0.919,0.622,1,-7.384,1,0.0997,0.228,4.24e-06,0.122,0.424,140.022,"hip hop, pop" +MEDUZA,Piece Of Your Heart,152913,False,2019,75,0.677,0.744,10,-6.806,0,0.0295,0.0404,0.00016,0.074,0.631,124.08,"pop, Dance/Electronic" +Ed Sheeran,Antisocial (with Travis Scott),161746,False,2019,63,0.716,0.823,5,-5.313,0,0.0495,0.132,0.0,0.361,0.91,151.957,pop +Travis Scott,SICKO MODE,312820,True,2018,81,0.834,0.73,8,-3.714,1,0.222,0.00513,0.0,0.124,0.446,155.008,"hip hop, Dance/Electronic" +Mabel,Mad Love,169813,False,2019,0,0.631,0.803,0,-2.974,0,0.155,0.675,0.0,0.11,0.62,198.065,"pop, Dance/Electronic" +Lana Del Rey,Doin' Time,202192,True,2019,80,0.641,0.559,7,-11.132,0,0.0355,0.404,0.00402,0.0937,0.523,144.982,pop +DJ Snake,"Taki Taki (feat. 
Selena Gomez, Ozuna & Cardi B)",212500,True,2019,70,0.842,0.801,8,-4.167,0,0.228,0.157,4.82e-06,0.0642,0.617,95.881,"hip hop, pop, Dance/Electronic" +A Boogie Wit da Hoodie,Look Back at It,179449,True,2018,73,0.791,0.587,3,-5.075,0,0.0413,0.407,0.0,0.148,0.536,96.057,hip hop +Saweetie,My Type,126446,True,2019,71,0.899,0.811,2,-6.294,1,0.258,0.000677,0.0,0.0672,0.587,105.038,"hip hop, pop, R&B" +Lil Tecca,Ransom,131240,True,2019,78,0.745,0.642,7,-6.257,0,0.287,0.0204,0.0,0.0658,0.226,179.974,hip hop +Russ Millions,Keisha & Becky - Remix,252906,True,2019,69,0.863,0.471,6,-9.545,1,0.478,0.251,0.0,0.121,0.644,140.969,pop +Billie Eilish,lovely (with Khalid),200185,False,2018,86,0.351,0.296,4,-10.109,0,0.0333,0.934,0.0,0.095,0.12,115.284,"pop, Dance/Electronic" +Mabel,Don't Call Me Up,178480,False,2019,77,0.674,0.881,9,-2.853,1,0.147,0.296,3.01e-06,0.0793,0.234,98.994,"pop, Dance/Electronic" +Lil Nas X,Old Town Road,113000,False,2019,76,0.907,0.53,1,-6.112,1,0.127,0.0578,2.23e-06,0.101,0.507,135.998,"hip hop, pop" +Meek Mill,Going Bad (feat. Drake),180522,True,2018,78,0.889,0.496,4,-6.365,0,0.0905,0.259,0.0,0.252,0.544,86.003,"hip hop, pop" +Post Malone,Wow.,149546,True,2019,79,0.829,0.539,11,-7.359,0,0.208,0.136,1.78e-06,0.103,0.388,99.96,hip hop +Doja Cat,Juicy,202333,True,2019,57,0.786,0.658,0,-2.61,1,0.0661,0.0856,0.0,0.0689,0.458,170.037,pop +A Boogie Wit da Hoodie,Swervin (feat. 6ix9ine),189486,True,2018,75,0.581,0.662,9,-5.239,1,0.303,0.0153,0.0,0.111,0.434,93.023,hip hop +iann dior,emotions,131213,False,2019,72,0.63,0.63,9,-6.211,1,0.0395,0.0131,0.0,0.142,0.163,80.512,"hip hop, pop" +Paulo Londra,Adan y Eva,256971,False,2019,72,0.767,0.709,1,-4.47,1,0.336,0.323,0.0,0.0745,0.72,171.993,"hip hop, latin, Dance/Electronic" +Drake,Money In The Grave (Drake ft. 
Rick Ross),205426,True,2019,76,0.831,0.502,10,-4.045,0,0.046,0.101,0.0,0.122,0.101,100.541,"hip hop, pop, R&B" +Lil Nas X,Panini,114893,False,2019,71,0.703,0.594,5,-6.146,0,0.0752,0.342,0.0,0.123,0.475,153.848,"hip hop, pop" +Bad Bunny,MIA (feat. Drake),210367,False,2018,77,0.817,0.539,6,-6.349,0,0.0621,0.0141,0.000496,0.099,0.158,97.062,latin +Polo G,Pop Out (feat. Lil Tjay),166560,True,2019,78,0.772,0.639,1,-7.119,1,0.467,0.15,0.0,0.0698,0.261,168.112,hip hop +Juice WRLD,Robbery,240050,True,2019,53,0.685,0.692,2,-5.122,1,0.0457,0.328,0.0,0.153,0.578,159.966,hip hop +21 Savage,a lot,288624,True,2018,78,0.837,0.636,1,-7.643,1,0.086,0.0395,0.00125,0.342,0.274,145.972,hip hop +Young Thug,The London (feat. J. Cole & Travis Scott),200106,True,2019,69,0.796,0.586,4,-6.946,0,0.147,0.0247,0.0,0.132,0.179,97.981,"hip hop, pop" +Anuel AA,China,301714,False,2019,76,0.786,0.808,7,-3.702,1,0.0882,0.0846,0.000289,0.0822,0.609,105.027,latin +Camila Cabello,Liar,207038,False,2019,65,0.74,0.498,11,-6.684,0,0.0456,0.0169,0.00282,0.319,0.652,98.016,pop +Flipp Dinero,Leave Me Alone,195637,True,2019,69,0.792,0.743,7,-2.806,1,0.0851,0.107,0.0,0.183,0.742,150.024,"hip hop, pop" +Cardi B,Money,183527,True,2018,73,0.95,0.59,8,-6.508,0,0.29,0.00534,0.0,0.11,0.219,130.003,"hip hop, pop" +Ariana Grande,"break up with your girlfriend, i'm bored",190440,True,2019,76,0.726,0.554,5,-5.29,0,0.0917,0.0421,0.0,0.106,0.335,169.999,pop +Dave,Location (feat. Burna Boy),241293,True,2019,78,0.812,0.496,9,-5.969,0,0.297,0.271,0.0,0.0955,0.55,109.979,hip hop +Ava Max,Sweet but Psycho,187436,False,2018,7,0.719,0.704,1,-4.724,1,0.0476,0.0691,0.0,0.166,0.628,133.002,pop +Dua Lipa,Don't Start Now,183290,False,2019,79,0.794,0.793,11,-4.521,0,0.0842,0.0125,0.0,0.0952,0.677,123.941,pop +Young T & Bugsey,Strike a Pose (feat. 
Aitch),214203,True,2019,61,0.531,0.581,1,-5.801,1,0.101,0.0107,1.62e-05,0.101,0.591,137.776,hip hop +Calvin Harris,Giant (with Rag'n'Bone Man),229184,False,2019,73,0.807,0.887,1,-4.311,0,0.0361,0.016,0.000503,0.0811,0.606,122.015,"hip hop, pop, Dance/Electronic" +Ed Sheeran,Take Me Back to London (feat. Stormzy),189733,True,2019,66,0.885,0.762,8,-5.513,0,0.216,0.219,0.0,0.162,0.605,138.058,pop +Cardi B,Please Me,200889,True,2019,73,0.747,0.57,1,-6.711,1,0.081,0.0642,0.0,0.0832,0.65,133.992,"hip hop, pop" +Stormzy,Vossi Bop,196266,True,2019,64,0.682,0.653,8,-6.062,1,0.339,0.13,0.00116,0.129,0.428,188.115,"hip hop, Dance/Electronic" +Tones And I,Dance Monkey,209438,False,2019,78,0.824,0.588,6,-6.4,0,0.0924,0.692,0.000104,0.149,0.513,98.027,pop +Ariana Grande,boyfriend (with Social House),186106,True,2019,77,0.4,0.795,10,-3.731,0,0.461,0.119,0.0,0.159,0.702,190.097,pop +Bad Bunny,Callaita,250533,True,2019,81,0.61,0.624,2,-4.773,1,0.309,0.6,2.12e-06,0.243,0.244,176.169,latin +Ava Max,So Am I,183026,False,2019,4,0.682,0.656,6,-4.67,1,0.0435,0.0737,0.0,0.353,0.607,130.089,pop +Pedro Capó,Calma - Remix,238200,False,2018,74,0.826,0.773,11,-4.218,0,0.0524,0.323,0.0,0.143,0.761,126.899,"pop, latin" +J. 
Cole,MIDDLE CHILD,213593,True,2019,80,0.837,0.364,8,-11.713,1,0.276,0.149,0.0,0.271,0.463,123.984,hip hop +Lunay,Soltera - Remix,266086,False,2019,70,0.795,0.783,5,-4.271,1,0.0432,0.361,0.0,0.437,0.799,92.01,"pop, latin" +Khalid,Better,229412,False,2018,70,0.442,0.585,0,-10.332,0,0.0964,0.0984,0.391,0.14,0.116,97.565,"pop, R&B" +Blueface,Thotiana,129264,True,2018,1,0.906,0.382,10,-12.89,0,0.269,0.18,0.0,0.113,0.391,104.025,hip hop +DaBaby,Suge,163320,True,2019,72,0.876,0.662,2,-6.482,0,0.426,0.0608,0.0,0.127,0.844,75.445,hip hop +AJ Tracey,Ladbroke Grove,190537,False,2019,69,0.903,0.839,11,-9.447,0,0.208,0.0939,0.0,0.102,0.727,133.986,"hip hop, Dance/Electronic" +Billie Eilish,bury a friend,193143,False,2019,75,0.905,0.389,8,-14.505,1,0.332,0.74,0.162,0.106,0.196,120.046,"pop, Dance/Electronic" +BTS,Boy With Luv (feat. Halsey),229773,False,2019,35,0.645,0.862,11,-4.757,0,0.0965,0.0923,0.0,0.192,0.798,119.991,pop +Wiley,Boasty (feat. Idris Elba),177185,True,2019,64,0.887,0.765,2,-5.207,1,0.0669,0.00915,0.000872,0.0907,0.456,102.958,"hip hop, Dance/Electronic" +Jonas Brothers,Only Human,183000,False,2019,71,0.795,0.496,0,-5.883,1,0.0722,0.108,0.0,0.0645,0.874,94.01,pop +5 Seconds of Summer,Easier,157492,False,2019,1,0.505,0.428,5,-5.604,1,0.221,0.489,0.0,0.0977,0.618,175.813,pop +Avicii,SOS (feat. Aloe Blacc),157202,False,2019,71,0.802,0.645,5,-6.181,0,0.0715,0.272,0.0,0.119,0.376,100.001,"pop, Dance/Electronic" +"Tyler, The Creator",EARFQUAKE,190066,True,2019,80,0.554,0.498,9,-8.866,1,0.0685,0.23,5.98e-06,0.795,0.413,79.635,hip hop +Ashley O,On A Roll,154447,False,2019,57,0.736,0.81,5,-6.354,1,0.0906,0.077,0.0,0.0523,0.387,125.011,set() +Dominic Fike,3 Nights,177666,False,2018,78,0.815,0.518,7,-6.594,0,0.0897,0.223,0.0,0.104,0.877,151.891,"rock, pop" +Sigala,Wish You Well,205653,False,2019,64,0.669,0.895,5,-3.787,0,0.0575,0.128,0.0,0.297,0.576,124.975,"pop, Dance/Electronic" +Megan Thee Stallion,Hot Girl Summer (feat. 
Nicki Minaj & Ty Dolla $ign),199427,True,2019,69,0.872,0.814,0,-4.568,1,0.155,0.00485,1.96e-06,0.214,0.57,98.985,"hip hop, pop, R&B" +Anuel AA,Secreto,258800,False,2019,75,0.807,0.803,11,-4.156,1,0.126,0.602,0.00853,0.136,0.706,91.987,latin +Sam Feldt,Post Malone (feat. RANI),174444,False,2019,69,0.59,0.642,7,-3.87,1,0.122,0.0771,0.0,0.105,0.651,107.356,"pop, Dance/Electronic" +Kehlani,Nights Like This (feat. Ty Dolla $ign),201787,True,2019,74,0.61,0.725,8,-5.131,1,0.15,0.367,0.0,0.154,0.291,146.163,"hip hop, pop, R&B" +Sech,Otro Trago,225933,True,2019,71,0.746,0.7,0,-4.669,1,0.341,0.136,0.000159,0.11,0.619,176.044,latin +Post Malone,Better Now,231266,True,2018,80,0.68,0.578,10,-5.804,1,0.04,0.331,0.0,0.135,0.341,145.038,hip hop +Ed Sheeran,Cross Me (feat. Chance the Rapper & PnB Rock),206186,True,2019,64,0.746,0.787,4,-6.373,1,0.12,0.214,0.0,0.0669,0.607,95.005,pop +Lauv,i'm so tired...,162582,False,2019,2,0.599,0.733,11,-7.058,1,0.203,0.176,0.0,0.242,0.534,102.211,"pop, Dance/Electronic" +iann dior,gone girl,136568,True,2019,69,0.677,0.714,11,-5.637,1,0.0287,0.162,0.0,0.0717,0.355,94.956,"hip hop, pop" +Panic! At The Disco,High Hopes,190946,False,2018,80,0.579,0.904,5,-2.729,1,0.0618,0.193,0.0,0.064,0.681,82.014,rock +Marshmello,One Thing Right,181823,False,2019,73,0.659,0.625,4,-2.253,1,0.045,0.0644,0.0,0.582,0.442,88.042,"pop, Dance/Electronic" +Sam Smith,How Do You Sleep?,202204,False,2019,73,0.477,0.682,1,-4.931,0,0.0925,0.153,0.0,0.0763,0.345,110.567,pop +NSG,Options,240081,True,2020,57,0.836,0.621,1,-4.684,0,0.0894,0.389,9.16e-05,0.104,0.762,101.993,"World/Traditional, hip hop" +Normani,Motivation,193837,False,2019,71,0.599,0.887,4,-3.967,1,0.0984,0.0192,1.21e-06,0.3,0.881,170.918,"pop, R&B" +Joel Corry,Sorry,188640,False,2019,63,0.744,0.79,8,-4.617,0,0.0562,0.0547,0.000802,0.32,0.847,125.002,"pop, Dance/Electronic" +Post Malone,Goodbyes (Feat. 
Young Thug),174960,True,2019,1,0.58,0.653,5,-3.818,1,0.0745,0.447,0.0,0.111,0.175,150.231,hip hop +Jonas Brothers,Sucker,181026,False,2019,79,0.842,0.734,1,-5.065,0,0.0588,0.0427,0.0,0.106,0.952,137.958,pop +Taylor Swift,Cruel Summer,178426,False,2019,78,0.552,0.702,9,-5.707,1,0.157,0.117,2.06e-05,0.105,0.564,169.994,pop +Blanco Brown,The Git Up,200593,False,2019,69,0.847,0.678,9,-8.635,1,0.109,0.0669,0.0,0.274,0.811,97.984,"hip hop, country" +Sam Smith,Dancing With A Stranger (with Normani),171029,False,2019,75,0.741,0.52,8,-7.513,1,0.0656,0.45,1.97e-06,0.222,0.347,102.998,pop +Post Malone,Circles,215280,False,2019,85,0.695,0.762,0,-3.497,1,0.0395,0.192,0.00244,0.0863,0.553,120.042,hip hop diff --git a/docs/.gitignore b/docs/.gitignore index e9ceace7b..af230d587 100644 --- a/docs/.gitignore +++ b/docs/.gitignore @@ -7,3 +7,4 @@ pydvl/* # Changelog CHANGELOG.md +CONTRIBUTING.md \ No newline at end of file diff --git a/docs/api/pydvl/value/shapley/classwise/img/classwise-shapley-discounted-utility-function.svg b/docs/api/pydvl/value/shapley/classwise/img/classwise-shapley-discounted-utility-function.svg index c925f1e4a..78f84d0f3 100644 --- a/docs/api/pydvl/value/shapley/classwise/img/classwise-shapley-discounted-utility-function.svg +++ b/docs/api/pydvl/value/shapley/classwise/img/classwise-shapley-discounted-utility-function.svg @@ -1,68001 +1,3 @@ - - - - - - - - 2023-09-25T22:47:24.903053 - image/svg+xml - - - Matplotlib v3.7.0, https://matplotlib.org/ - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - 
- - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - 
- - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - 
- - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - 
- - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - 
- - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - 
- - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - 
- - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - 
- - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - 
- - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - 
- - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - 
- - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - 
- - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - 
- - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - 
- - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - 
- - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - 
- - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - 
- - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - 
- - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - 
- - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - 
- - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - 
- - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - 
- - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - 
- - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - 
- - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - 
- - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - 
- - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - 
- - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - 
- - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - 
- - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - 
- - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - 
- - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - 
- - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - 
- - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - 
- - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - 
- - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - 
- - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - 
- - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - 
- - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - 
- - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - 
- - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - 
- - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - 
- - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - 
- - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - 
- - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - 
- - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - 
- - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - 
- - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - 
- - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - 
- - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - 
- - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - 
- - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - 
- - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - 
- - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - 
- - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - 
- - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - 
- - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - 
- - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - 
- - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - 
- - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - 
- - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - 
- - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - 
- - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - 
- - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - 
- - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - 
- - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - 
- - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - 
- - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - +version https://git-lfs.github.com/spec/v1 +oid sha256:ca6951b6a40d0a061432b6508772dc8bef2c2547c69d369d8966a37b50d551aa +size 3528731 diff --git a/docs/assets/influence_functions_example.png b/docs/assets/influence_functions_example.png deleted file mode 100644 index 94f804e9e..000000000 Binary files a/docs/assets/influence_functions_example.png and /dev/null differ diff --git a/docs/assets/logo.svg b/docs/assets/logo.svg index 5869662b9..9287eb0bb 100644 --- a/docs/assets/logo.svg +++ b/docs/assets/logo.svg @@ -1 +1,3 @@ - \ No newline at end of file +version https://git-lfs.github.com/spec/v1 +oid 
sha256:bce89589a512ec912d06da351f493be037490e6f47cfcdb98df729367be578c8 +size 3138 diff --git a/docs/assets/material-code.svg b/docs/assets/material-code.svg index cbbc31424..28cd55f17 100644 --- a/docs/assets/material-code.svg +++ b/docs/assets/material-code.svg @@ -1 +1,3 @@ - \ No newline at end of file +version https://git-lfs.github.com/spec/v1 +oid sha256:3fa8d133be89acd6e75badb86085a675fcb6a5c17024871a7fb735b097d3f7cf +size 207 diff --git a/docs/assets/material-computer.svg b/docs/assets/material-computer.svg index 74162d034..8ba427c82 100644 --- a/docs/assets/material-computer.svg +++ b/docs/assets/material-computer.svg @@ -1 +1,3 @@ - \ No newline at end of file +version https://git-lfs.github.com/spec/v1 +oid sha256:8765f41e3c045e429c0c496579d4afe07d492ebc36a1a22c45ab29deb4c08b47 +size 394 diff --git a/docs/assets/material-description.svg b/docs/assets/material-description.svg index 904da8a7b..bb6df3f92 100644 --- a/docs/assets/material-description.svg +++ b/docs/assets/material-description.svg @@ -1 +1,3 @@ - \ No newline at end of file +version https://git-lfs.github.com/spec/v1 +oid sha256:c86deb1bb0d4e84112fe56da6584aaf00e1bf64f2b6e2494332a2b77ab0dd153 +size 295 diff --git a/docs/assets/material-toolbox.svg b/docs/assets/material-toolbox.svg index 85146d8ac..cd71e6529 100644 --- a/docs/assets/material-toolbox.svg +++ b/docs/assets/material-toolbox.svg @@ -1 +1,3 @@ - \ No newline at end of file +version https://git-lfs.github.com/spec/v1 +oid sha256:7e16ce0bf72dfb113bc116de6422b56d45947112674d4b1f002d1c9205360028 +size 339 diff --git a/docs/assets/pydvl.bib b/docs/assets/pydvl.bib index a79fb6627..e5b1571e6 100644 --- a/docs/assets/pydvl.bib +++ b/docs/assets/pydvl.bib @@ -10,12 +10,45 @@ @article{agarwal_secondorder_2017 pages = {1--40}, url = {https://www.jmlr.org/papers/v18/16-491.html}, abstract = {First-order stochastic methods are the state-of-the-art in large-scale machine learning optimization owing to efficient per-iteration complexity. 
Second-order methods, while able to provide faster convergence, have been much less explored due to the high cost of computing the second-order information. In this paper we develop second-order stochastic methods for optimization problems in machine learning that match the per-iteration cost of gradient based methods, and in certain settings improve upon the overall running time over popular first-order methods. Furthermore, our algorithm has the desirable property of being implementable in time linear in the sparsity of the input data.}, + langid = {english}, + keywords = {notion} +} + +@inproceedings{bae_if_2022, + title = {If {{Influence Functions}} Are the {{Answer}}, {{Then What}} Is the {{Question}}?}, + booktitle = {Advances in {{Neural Information Processing Systems}}}, + author = {Bae, Juhan and Ng, Nathan and Lo, Alston and Ghassemi, Marzyeh and Grosse, Roger B.}, + date = {2022-12-06}, + volume = {35}, + pages = {17953--17967}, + location = {New Orleans, LA, USA}, + url = {https://proceedings.neurips.cc/paper_files/paper/2022/hash/7234e0c36fdbcb23e7bd56b68838999b-Abstract-Conference.html}, + urldate = {2023-11-27}, + abstract = {Influence functions efficiently estimate the effect of removing a single training data point on a model's learned parameters. While influence estimates align well with leave-one-out retraining for linear models, recent works have shown this alignment is often poor in neural networks. In this work, we investigate the specific factors that cause this discrepancy by decomposing it into five separate terms. We study the contributions of each term on a variety of architectures and datasets and how they vary with factors such as network width and training time. While practical influence function estimates may be a poor match to leave-one-out retraining for nonlinear networks, we show that they are often a good approximation to a different object we term the proximal Bregman response function (PBRF). 
Since the PBRF can still be used to answer many of the questions motivating influence functions, such as identifying influential or mislabeled examples, our results suggest that current algorithms for influence function estimation give more informative results than previous error analyses would suggest.}, + eventtitle = {{{NeurIPS}} 2022}, langid = {english} } +@article{bekas_estimator_2007, + title = {An Estimator for the Diagonal of a Matrix}, + author = {Bekas, C. and Kokiopoulou, E. and Saad, Y.}, + date = {2007-11-01}, + journaltitle = {Applied Numerical Mathematics}, + shortjournal = {Applied Numerical Mathematics}, + series = {Numerical {{Algorithms}}, {{Parallelism}} and {{Applications}} (2)}, + volume = {57}, + number = {11}, + pages = {1214--1229}, + issn = {0168-9274}, + doi = {10.1016/j.apnum.2007.01.003}, + url = {https://www.sciencedirect.com/science/article/pii/S0168927407000244}, + urldate = {2024-03-19}, + abstract = {A number of applications require to compute an approximation of the diagonal of a matrix when this matrix is not explicitly available but matrix–vector products with it are easy to evaluate. In some cases, it is the trace of the matrix rather than the diagonal that is needed. This paper describes methods for estimating diagonals and traces of matrices in these situations. The goal is to obtain a good estimate of the diagonal by applying only a small number of matrix–vector products, using selected vectors. We begin by considering the use of random test vectors and then explore special vectors obtained from Hadamard matrices. The methods are tested in the context of computational materials science to estimate the diagonal of the density matrix which holds the charge densities. 
Numerical experiments indicate that the diagonal estimator may offer an alternative method that in some cases can greatly reduce computational costs in electronic structures calculations.} +} + @article{benmerzoug_re_2023, title = {[{{Re}}] {{If}} You like {{Shapley}}, Then You'll Love the Core}, - author = {Benmerzoug, Anes and Delgado, Miguel de Benito}, + author = {Benmerzoug, Anes and de Benito Delgado, Miguel}, date = {2023-07-31}, journaltitle = {ReScience C}, volume = {9}, @@ -25,6 +58,21 @@ @article{benmerzoug_re_2023 url = {https://zenodo.org/record/8173733}, urldate = {2023-08-27}, abstract = {We investigate the results of [1] in the field of data valuation. We repeat their experiments and conclude that the (Monte Carlo) Least Core is sensitive to important characteristics of the ML problem of interest, making it difficult to apply.}, + keywords = {notion} +} + +@unpublished{broderick_automatic_2021, + title = {An {{Automatic Finite-Sample Robustness Metric}}: {{When Can Dropping}} a {{Little Data Make}} a {{Big Difference}}?}, + shorttitle = {An {{Automatic Finite-Sample Robustness Metric}}}, + author = {Broderick, Tamara and Giordano, Ryan and Meager, Rachael}, + date = {2021-11-03}, + eprint = {2011.14999}, + eprinttype = {arxiv}, + url = {https://arxiv.org/abs/2011.14999}, + abstract = {We propose a method to assess the sensitivity of econometric analyses to the removal of a small fraction of the data. Manually checking the influence of all possible small subsets is computationally infeasible, so we provide an approximation to find the most influential subset. Our metric, the "Approximate Maximum Influence Perturbation," is automatically computable for common methods including (but not limited to) OLS, IV, MLE, GMM, and variational Bayes. We provide finite-sample error bounds on approximation performance. At minimal extra cost, we provide an exact finite-sample lower bound on sensitivity. 
We find that sensitivity is driven by a signal-to-noise ratio in the inference problem, is not reflected in standard errors, does not disappear asymptotically, and is not due to misspecification. While some empirical applications are robust, results of several economics papers can be overturned by removing less than 1\% of the sample.}, + langid = {english}, + keywords = {notion}, + annotation = {video: https://youtu.be/w8OX0lK1CKo} } @article{castro_polynomial_2009, @@ -39,10 +87,42 @@ @article{castro_polynomial_2009 pages = {1726--1730}, issn = {0305-0548}, doi = {10.1016/j.cor.2008.04.004}, - url = {http://www.sciencedirect.com/science/article/pii/S0305054808000804}, + url = {https://www.sciencedirect.com/science/article/pii/S0305054808000804}, urldate = {2020-11-21}, abstract = {In this paper we develop a polynomial method based on sampling theory that can be used to estimate the Shapley value (or any semivalue) for cooperative games. Besides analyzing the complexity problem, we examine some desirable statistical properties of the proposed approach and provide some computational results.}, - langid = {english} + langid = {english}, + keywords = {notion} +} + +@article{frangella_randomized_2023, + title = {Randomized {{Nyström Preconditioning}}}, + author = {Frangella, Zachary and Tropp, Joel A. and Udell, Madeleine}, + date = {2023-06-30}, + journaltitle = {SIAM Journal on Matrix Analysis and Applications}, + shortjournal = {SIAM J. Matrix Anal. Appl.}, + volume = {44}, + number = {2}, + pages = {718--752}, + publisher = {{Society for Industrial and Applied Mathematics}}, + issn = {0895-4798}, + doi = {10.1137/21M1466244}, + url = {https://epubs.siam.org/doi/abs/10.1137/21M1466244}, + urldate = {2024-03-12}, + abstract = {Randomized methods are becoming increasingly popular in numerical linear algebra. However, few attempts have been made to use them in developing preconditioners. 
Our interest lies in solving large-scale sparse symmetric positive definite linear systems of equations, where the system matrix is preordered to doubly bordered block diagonal form (for example, using a nested dissection ordering). We investigate the use of randomized methods to construct high-quality preconditioners. In particular, we propose a new and efficient approach that employs Nyström's method for computing low rank approximations to develop robust algebraic two-level preconditioners. Construction of the new preconditioners involves iteratively solving a smaller but denser symmetric positive definite Schur complement system with multiple right-hand sides. Numerical experiments on problems coming from a range of application areas demonstrate that this inner system can be solved cheaply using block conjugate gradients and that using a large convergence tolerance to limit the cost does not adversely affect the quality of the resulting Nyström--Schur two-level preconditioner.} +} + +@inproceedings{george_fast_2018, + title = {Fast {{Approximate Natural Gradient Descent}} in a {{Kronecker Factored Eigenbasis}}}, + booktitle = {Advances in {{Neural Information Processing Systems}}}, + author = {George, Thomas and Laurent, César and Bouthillier, Xavier and Ballas, Nicolas and Vincent, Pascal}, + date = {2018}, + volume = {31}, + eprint = {1806.03884}, + eprinttype = {arxiv}, + publisher = {Curran Associates, Inc.}, + url = {https://proceedings.neurips.cc/paper/2018/hash/48000647b315f6f00f913caa757a70b3-Abstract.html}, + urldate = {2024-01-12}, + abstract = {Optimization algorithms that leverage gradient covariance information, such as variants of natural gradient descent (Amari, 1998), offer the prospect of yielding more effective descent directions. For models with many parameters, the covari- ance matrix they are based on becomes gigantic, making them inapplicable in their original form. 
This has motivated research into both simple diagonal approxima- tions and more sophisticated factored approximations such as KFAC (Heskes, 2000; Martens \& Grosse, 2015; Grosse \& Martens, 2016). In the present work we draw inspiration from both to propose a novel approximation that is provably better than KFAC and amendable to cheap partial updates. It consists in tracking a diagonal variance, not in parameter coordinates, but in a Kronecker-factored eigenbasis, in which the diagonal approximation is likely to be more effective. Experiments show improvements over KFAC in optimization speed for several deep network architectures.} } @inproceedings{ghorbani_data_2019, @@ -54,9 +134,9 @@ @inproceedings{ghorbani_data_2019 eprint = {1904.02868}, eprinttype = {arxiv}, pages = {2242--2251}, - publisher = {{PMLR}}, + publisher = {PMLR}, issn = {2640-3498}, - url = {http://proceedings.mlr.press/v97/ghorbani19c.html}, + url = {https://proceedings.mlr.press/v97/ghorbani19c.html}, urldate = {2020-11-01}, abstract = {As data becomes the fuel driving technological and economic growth, a fundamental challenge is how to quantify the value of data in algorithmic predictions and decisions. For example, in healthcare and consumer markets, it has been suggested that individuals should be compensated for the data that they generate, but it is not clear what is an equitable valuation for individual data. In this work, we develop a principled framework to address data valuation in the context of supervised machine learning. Given a learning algorithm trained on n data points to produce a predictor, we propose data Shapley as a metric to quantify the value of each training datum to the predictor performance. Data Shapley uniquely satisfies several natural properties of equitable data valuation. 
We develop Monte Carlo and gradient-based methods to efficiently estimate data Shapley values in practical settings where complex learning algorithms, including neural networks, are trained on large datasets. In addition to being equitable, extensive experiments across biomedical, image and synthetic data demonstrate that data Shapley has several other benefits: 1) it is more powerful than the popular leave-one-out or leverage score in providing insight on what data is more valuable for a given learning task; 2) low Shapley value data effectively capture outliers and corruptions; 3) high Shapley value data inform what type of new data to acquire to improve the predictor.}, eventtitle = {International {{Conference}} on {{Machine Learning}} ({{ICML}} 2019)}, @@ -75,7 +155,7 @@ @article{hampel_influence_1974 eprint = {2285666}, eprinttype = {jstor}, pages = {383--393}, - publisher = {{[American Statistical Association, Taylor \& Francis, Ltd.]}}, + publisher = {[American Statistical Association, Taylor \& Francis, Ltd.]}, issn = {0162-1459}, doi = {10.2307/2285666}, url = {https://www.jstor.org/stable/2285666}, @@ -83,18 +163,36 @@ @article{hampel_influence_1974 abstract = {This paper treats essentially the first derivative of an estimator viewed as functional and the ways in which it can be used to study local robustness properties. A theory of robust estimation "near" strict parametric models is briefly sketched and applied to some classical situations. Relations between von Mises functionals, the jackknife and U-statistics are indicated. A number of classical and new estimators are discussed, including trimmed and Winsorized means, Huber-estimators, and more generally maximum likelihood and M-estimators. 
Finally, a table with some numerical robustness properties is given.} } -@online{hataya_nystrom_2023, - title = {Nystrom {{Method}} for {{Accurate}} and {{Scalable Implicit Differentiation}}}, +@inproceedings{hataya_nystrom_2023, + title = {Nyström {{Method}} for {{Accurate}} and {{Scalable Implicit Differentiation}}}, + booktitle = {Proceedings of {{The}} 26th {{International Conference}} on {{Artificial Intelligence}} and {{Statistics}}}, author = {Hataya, Ryuichiro and Yamada, Makoto}, - date = {2023-02-19}, - eprint = {2302.09726}, - eprinttype = {arxiv}, - eprintclass = {cs}, - url = {http://arxiv.org/abs/2302.09726}, - urldate = {2023-05-01}, - abstract = {The essential difficulty of gradient-based bilevel optimization using implicit differentiation is to estimate the inverse Hessian vector product with respect to neural network parameters. This paper proposes to tackle this problem by the Nystrom method and the Woodbury matrix identity, exploiting the low-rankness of the Hessian. Compared to existing methods using iterative approximation, such as conjugate gradient and the Neumann series approximation, the proposed method avoids numerical instability and can be efficiently computed in matrix operations without iterations. As a result, the proposed method works stably in various tasks and is faster than iterative approximations. Throughout experiments including large-scale hyperparameter optimization and meta learning, we demonstrate that the Nystrom method consistently achieves comparable or even superior performance to other approaches. 
The source code is available from https://github.com/moskomule/hypergrad.}, - pubstate = {preprint}, - keywords = {notion} + date = {2023-04-11}, + pages = {4643--4654}, + publisher = {PMLR}, + issn = {2640-3498}, + url = {https://proceedings.mlr.press/v206/hataya23a.html}, + urldate = {2024-02-26}, + abstract = {The essential difficulty of gradient-based bilevel optimization using implicit differentiation is to estimate the inverse Hessian vector product with respect to neural network parameters. This paper proposes to tackle this problem by the Nyström method and the Woodbury matrix identity, exploiting the low-rankness of the Hessian. Compared to existing methods using iterative approximation, such as conjugate gradient and the Neumann series approximation, the proposed method avoids numerical instability and can be efficiently computed in matrix operations without iterations. As a result, the proposed method works stably in various tasks and is faster than iterative approximations. Throughout experiments including large-scale hyperparameter optimization and meta learning, we demonstrate that the Nyström method consistently achieves comparable or even superior performance to other approaches. 
The source code is available from https://github.com/moskomule/hypergrad.}, + eventtitle = {International {{Conference}} on {{Artificial Intelligence}} and {{Statistics}}}, + langid = {english} +} + +@article{ji_breakdownfree_2017, + title = {A Breakdown-Free Block Conjugate Gradient Method}, + author = {Ji, Hao and Li, Yaohang}, + date = {2017-06}, + journaltitle = {BIT Numerical Mathematics}, + shortjournal = {Bit Numer Math}, + volume = {57}, + number = {2}, + pages = {379--403}, + issn = {0006-3835, 1572-9125}, + doi = {10.1007/s10543-016-0631-z}, + url = {https://link.springer.com/10.1007/s10543-016-0631-z}, + urldate = {2024-02-28}, + abstract = {In this paper, we analyze all possible situations of rank deficiency that cause breakdown in block conjugate gradient (BCG) solvers. A simple solution, breakdownfree block conjugate gradient (BFBCG), is designed to address the rank deficiency problem. The rationale of the BFBCG algorithm is to derive new forms of parameter matrices based on the potentially reduced search subspace to handle rank deficiency. Orthogonality properties and convergence of BFBCG in case of rank deficiency are justified accordingly with mathematical rigor. BFBCG yields faster convergence than restarting BCG when breakdown occurs. 
Numerical examples suffering from rank deficiency are provided to demonstrate the robustness of BFBCG.}, + langid = {english} } @inproceedings{jia_efficient_2019, @@ -103,9 +201,9 @@ @inproceedings{jia_efficient_2019 author = {Jia, Ruoxi and Dao, David and Wang, Boxin and Hubis, Frances Ann and Hynes, Nick and Gürel, Nezihe Merve and Li, Bo and Zhang, Ce and Song, Dawn and Spanos, Costas J.}, date = {2019-04-11}, pages = {1167--1176}, - publisher = {{PMLR}}, + publisher = {PMLR}, issn = {2640-3498}, - url = {http://proceedings.mlr.press/v89/jia19a.html}, + url = {https://proceedings.mlr.press/v89/jia19a.html}, urldate = {2021-02-12}, abstract = {“How much is my data worth?” is an increasingly common question posed by organizations and individuals alike. An answer to this question could allow, for instance, fairly distributing profits...}, eventtitle = {International {{Conference}} on {{Artificial Intelligence}} and {{Statistics}} ({{AISTATS}})}, @@ -127,7 +225,7 @@ @article{jia_efficient_2019a doi = {10.14778/3342263.3342637}, url = {https://doi.org/10.14778/3342263.3342637}, urldate = {2021-02-12}, - abstract = {Given a data set D containing millions of data points and a data consumer who is willing to pay for \$X to train a machine learning (ML) model over D, how should we distribute this \$X to each data point to reflect its "value"? In this paper, we define the "relative value of data" via the Shapley value, as it uniquely possesses properties with appealing real-world interpretations, such as fairness, rationality and decentralizability. For general, bounded utility functions, the Shapley value is known to be challenging to compute: to get Shapley values for all N data points, it requires O(2N) model evaluations for exact computation and O(N log N) for (ϵ, δ)-approximation. In this paper, we focus on one popular family of ML models relying on K-nearest neighbors (KNN). 
The most surprising result is that for unweighted KNN classifiers and regressors, the Shapley value of all N data points can be computed, exactly, in O(N log N) time - an exponential improvement on computational complexity! Moreover, for (ϵ, δ)-approximation, we are able to develop an algorithm based on Locality Sensitive Hashing (LSH) with only sublinear complexity O(Nh(ϵ, K) log N) when ϵ is not too small and K is not too large. We empirically evaluate our algorithms on up to 10 million data points and even our exact algorithm is up to three orders of magnitude faster than the baseline approximation algorithm. The LSH-based approximation algorithm can accelerate the value calculation process even further. We then extend our algorithm to other scenarios such as (1) weighed KNN classifiers, (2) different data points are clustered by different data curators, and (3) there are data analysts providing computation who also requires proper valuation. Some of these extensions, although also being improved exponentially, are less practical for exact computation (e.g., O(NK) complexity for weigthed KNN). We thus propose an Monte Carlo approximation algorithm, which is O(N(log N)2/(log K)2) times more efficient than the baseline approximation algorithm.}, + abstract = {Given a data set D containing millions of data points and a data consumer who is willing to pay \textbackslash\$X to train a machine learning (ML) model over D, how should we distribute this \textbackslash\$X to each data point to reflect its "value"? In this paper, we define the "relative value of data" via the Shapley value, as it uniquely possesses properties with appealing real-world interpretations, such as fairness, rationality and decentralizability. For general, bounded utility functions, the Shapley value is known to be challenging to compute: to get Shapley values for all N data points, it requires O(2N) model evaluations for exact computation and O(N log N) for (ϵ, δ)-approximation. 
In this paper, we focus on one popular family of ML models relying on K-nearest neighbors (KNN). The most surprising result is that for unweighted KNN classifiers and regressors, the Shapley value of all N data points can be computed, exactly, in O(N log N) time - an exponential improvement on computational complexity! Moreover, for (ϵ, δ)-approximation, we are able to develop an algorithm based on Locality Sensitive Hashing (LSH) with only sublinear complexity O(Nh(ϵ, K) log N) when ϵ is not too small and K is not too large. We empirically evaluate our algorithms on up to 10 million data points and even our exact algorithm is up to three orders of magnitude faster than the baseline approximation algorithm. The LSH-based approximation algorithm can accelerate the value calculation process even further. We then extend our algorithm to other scenarios such as (1) weighed KNN classifiers, (2) different data points are clustered by different data curators, and (3) there are data analysts providing computation who also requires proper valuation. Some of these extensions, although also being improved exponentially, are less practical for exact computation (e.g., O(NK) complexity for weigthed KNN). We thus propose an Monte Carlo approximation algorithm, which is O(N(log N)2/(log K)2) times more efficient than the baseline approximation algorithm.}, langid = {english}, keywords = {notion} } @@ -153,7 +251,7 @@ @inproceedings{koh_understanding_2017 eprint = {1703.04730}, eprinttype = {arxiv}, pages = {1885--1894}, - publisher = {{PMLR}}, + publisher = {PMLR}, url = {https://proceedings.mlr.press/v70/koh17a.html}, urldate = {2022-05-09}, abstract = {How can we explain the predictions of a black-box model? In this paper, we use influence functions — a classic technique from robust statistics — to trace a model’s prediction through the learning algorithm and back to its training data, thereby identifying training points most responsible for a given prediction. 
To scale up influence functions to modern machine learning settings, we develop a simple, efficient implementation that requires only oracle access to gradients and Hessian-vector products. We show that even on non-convex and non-differentiable models where the theory breaks down, approximations to influence functions can still provide valuable information. On linear models and convolutional neural networks, we demonstrate that influence functions are useful for multiple purposes: understanding model behavior, debugging models, detecting dataset errors, and even creating visually-indistinguishable training-set attacks.}, @@ -162,6 +260,19 @@ @inproceedings{koh_understanding_2017 keywords = {notion} } +@inproceedings{kong_resolving_2022, + title = {Resolving {{Training Biases}} via {{Influence-based Data Relabeling}}}, + author = {Kong, Shuming and Shen, Yanyan and Huang, Linpeng}, + date = {2022}, + url = {https://openreview.net/forum?id=EskfH0bwNVn}, + urldate = {2022-05-03}, + abstract = {The performance of supervised learning methods easily suffers from the training bias issue caused by train-test distribution mismatch or label noise. 
Influence function is a technique that...}, + eventtitle = {International {{Conference}} on {{Learning Representations}} ({{ICLR}} 2022)}, + langid = {english}, + keywords = {notion}, + annotation = {video:https://iclr.cc/virtual/2022/oral/6492} +} + @inproceedings{kwon_beta_2022, title = {Beta {{Shapley}}: A {{Unified}} and {{Noise-reduced Data Valuation Framework}} for {{Machine Learning}}}, shorttitle = {Beta {{Shapley}}}, @@ -171,9 +282,9 @@ @inproceedings{kwon_beta_2022 volume = {151}, eprint = {2110.14049}, eprinttype = {arxiv}, - publisher = {{PMLR}}, - location = {{Valencia, Spain}}, - url = {http://arxiv.org/abs/2110.14049}, + publisher = {PMLR}, + location = {Valencia, Spain}, + url = {https://arxiv.org/abs/2110.14049}, urldate = {2022-04-06}, abstract = {Data Shapley has recently been proposed as a principled framework to quantify the contribution of individual datum in machine learning. It can effectively identify helpful or harmful data points for a learning algorithm. In this paper, we propose Beta Shapley, which is a substantial generalization of Data Shapley. Beta Shapley arises naturally by relaxing the efficiency axiom of the Shapley value, which is not critical for machine learning settings. Beta Shapley unifies several popular data valuation methods and includes data Shapley as a special case. Moreover, we prove that Beta Shapley has several desirable statistical properties and propose efficient algorithms to estimate it. 
We demonstrate that Beta Shapley outperforms state-of-the-art data valuation methods on several downstream ML tasks such as: 1) detecting mislabeled training data; 2) learning with subsamples; and 3) identifying points whose addition or removal have the largest positive or negative impact on the model.}, eventtitle = {{{AISTATS}} 2022}, @@ -181,6 +292,23 @@ @inproceedings{kwon_beta_2022 keywords = {notion} } +@inproceedings{kwon_dataoob_2023, + title = {Data-{{OOB}}: {{Out-of-bag Estimate}} as a {{Simple}} and {{Efficient Data Value}}}, + shorttitle = {Data-{{OOB}}}, + booktitle = {Proceedings of the 40th {{International Conference}} on {{Machine Learning}}}, + author = {Kwon, Yongchan and Zou, James}, + date = {2023-07-03}, + pages = {18135--18152}, + publisher = {PMLR}, + issn = {2640-3498}, + url = {https://proceedings.mlr.press/v202/kwon23e.html}, + urldate = {2023-09-06}, + abstract = {Data valuation is a powerful framework for providing statistical insights into which data are beneficial or detrimental to model training. Many Shapley-based data valuation methods have shown promising results in various downstream tasks, however, they are well known to be computationally challenging as it requires training a large number of models. As a result, it has been recognized as infeasible to apply to large datasets. To address this issue, we propose Data-OOB, a new data valuation method for a bagging model that utilizes the out-of-bag estimate. The proposed method is computationally efficient and can scale to millions of data by reusing trained weak learners. Specifically, Data-OOB takes less than 2.25 hours on a single CPU processor when there are \$10\^{}6\$ samples to evaluate and the input dimension is 100. Furthermore, Data-OOB has solid theoretical interpretations in that it identifies the same important data point as the infinitesimal jackknife influence function when two different points are compared. 
We conduct comprehensive experiments using 12 classification datasets, each with thousands of sample sizes. We demonstrate that the proposed method significantly outperforms existing state-of-the-art data valuation methods in identifying mislabeled data and finding a set of helpful (or harmful) data points, highlighting the potential for applying data values in real-world applications.}, + eventtitle = {International {{Conference}} on {{Machine Learning}}}, + langid = {english}, + keywords = {notion} +} + @inproceedings{kwon_efficient_2021, title = {Efficient {{Computation}} and {{Analysis}} of {{Distributional Shapley Values}}}, booktitle = {Proceedings of the 24th {{International Conference}} on {{Artificial Intelligence}} and {{Statistics}}}, @@ -189,15 +317,58 @@ @inproceedings{kwon_efficient_2021 eprint = {2007.01357}, eprinttype = {arxiv}, pages = {793--801}, - publisher = {{PMLR}}, + publisher = {PMLR}, issn = {2640-3498}, - url = {http://proceedings.mlr.press/v130/kwon21a.html}, + url = {https://proceedings.mlr.press/v130/kwon21a.html}, urldate = {2021-04-23}, abstract = {Distributional data Shapley value (DShapley) has recently been proposed as a principled framework to quantify the contribution of individual datum in machine learning. 
DShapley develops the founda...}, eventtitle = {International {{Conference}} on {{Artificial Intelligence}} and {{Statistics}}}, langid = {english} } +@inproceedings{li_achieving_2022, + title = {Achieving {{Fairness}} at {{No Utility Cost}} via {{Data Reweighing}} with {{Influence}}}, + booktitle = {Proceedings of the 39th {{International Conference}} on {{Machine Learning}}}, + author = {Li, Peizhao and Liu, Hongfu}, + date = {2022-06-28}, + pages = {12917--12930}, + publisher = {PMLR}, + issn = {2640-3498}, + url = {https://proceedings.mlr.press/v162/li22p.html}, + urldate = {2024-03-20}, + abstract = {With the fast development of algorithmic governance, fairness has become a compulsory property for machine learning models to suppress unintentional discrimination. In this paper, we focus on the pre-processing aspect for achieving fairness, and propose a data reweighing approach that only adjusts the weight for samples in the training phase. Different from most previous reweighing methods which usually assign a uniform weight for each (sub)group, we granularly model the influence of each training sample with regard to fairness-related quantity and predictive utility, and compute individual weights based on influence under the constraints from both fairness and utility. Experimental results reveal that previous methods achieve fairness at a non-negligible cost of utility, while as a significant advantage, our approach can empirically release the tradeoff and obtain cost-free fairness for equal opportunity. We demonstrate the cost-free fairness through vanilla classifiers and standard training processes, compared to baseline methods on multiple real-world tabular datasets. 
Code available at https://github.com/brandeis-machine-learning/influence-fairness.}, + eventtitle = {International {{Conference}} on {{Machine Learning}}}, + langid = {english} +} + +@article{maleki_bounding_2014, + title = {Bounding the {{Estimation Error}} of {{Sampling-based Shapley Value Approximation}}}, + author = {Maleki, Sasan and Tran-Thanh, Long and Hines, Greg and Rahwan, Talal and Rogers, Alex}, + date = {2014-02-12}, + journaltitle = {ArXiv13064265 Cs}, + eprint = {1306.4265}, + eprinttype = {arxiv}, + eprintclass = {cs}, + url = {https://arxiv.org/abs/1306.4265}, + urldate = {2020-11-16}, + abstract = {The Shapley value is arguably the most central normative solution concept in cooperative game theory. It specifies a unique way in which the reward from cooperation can be "fairly" divided among players. While it has a wide range of real world applications, its use is in many cases hampered by the hardness of its computation. A number of researchers have tackled this problem by (i) focusing on classes of games where the Shapley value can be computed efficiently, or (ii) proposing representation formalisms that facilitate such efficient computation, or (iii) approximating the Shapley value in certain classes of games. For the classical \textbackslash textit\{characteristic function\} representation, the only attempt to approximate the Shapley value for the general class of games is due to Castro \textbackslash textit\{et al.\} \textbackslash cite\{castro\}. While this algorithm provides a bound on the approximation error, this bound is \textbackslash textit\{asymptotic\}, meaning that it only holds when the number of samples increases to infinity. On the other hand, when a finite number of samples is drawn, an unquantifiable error is introduced, meaning that the bound no longer holds. 
With this in mind, we provide non-asymptotic bounds on the estimation error for two cases: where (i) the \textbackslash textit\{variance\}, and (ii) the \textbackslash textit\{range\}, of the players' marginal contributions is known. Furthermore, for the second case, we show that when the range is significantly large relative to the Shapley value, the bound can be improved (from \$O(\textbackslash frac\{r\}\{m\})\$ to \$O(\textbackslash sqrt\{\textbackslash frac\{r\}\{m\}\})\$). Finally, we propose, and demonstrate the effectiveness of using stratified sampling for improving the bounds further.} +} + +@inproceedings{martens_optimizing_2015, + title = {Optimizing {{Neural Networks}} with {{Kronecker-factored Approximate Curvature}}}, + booktitle = {Proceedings of the 32nd {{International Conference}} on {{Machine Learning}}}, + author = {Martens, James and Grosse, Roger}, + date = {2015-06-01}, + pages = {2408--2417}, + publisher = {PMLR}, + issn = {1938-7228}, + url = {https://proceedings.mlr.press/v37/martens15.html}, + urldate = {2022-11-26}, + abstract = {We propose an efficient method for approximating natural gradient descent in neural networks which we call Kronecker-factored Approximate Curvature (K-FAC). K-FAC is based on an efficiently invertible approximation of a neural network’s Fisher information matrix which is neither diagonal nor low-rank, and in some cases is completely non-sparse. It is derived by approximating various large blocks of the Fisher (corresponding to entire layers) as being the Kronecker product of two much smaller matrices. While only several times more expensive to compute than the plain stochastic gradient, the updates produced by K-FAC make much more progress optimizing the objective, which results in an algorithm that can be much faster than stochastic gradient descent with momentum in practice. 
And unlike some previously proposed approximate natural-gradient/Newton methods which use high-quality non-diagonal curvature matrices (such as Hessian-free optimization), K-FAC works very well in highly stochastic optimization regimes. This is because the cost of storing and inverting K-FAC’s approximation to the curvature matrix does not depend on the amount of data used to estimate it, which is a feature typically associated only with diagonal or low-rank approximations to the curvature matrix.}, + eventtitle = {International {{Conference}} on {{Machine Learning}}}, + langid = {english} +} + @article{mitchell_sampling_2022, title = {Sampling {{Permutations}} for {{Shapley Value Estimation}}}, author = {Mitchell, Rory and Cooper, Joshua and Frank, Eibe and Holmes, Geoffrey}, @@ -208,7 +379,7 @@ @article{mitchell_sampling_2022 number = {43}, pages = {1--46}, issn = {1533-7928}, - url = {http://jmlr.org/papers/v23/21-0439.html}, + url = {https://jmlr.org/papers/v23/21-0439.html}, urldate = {2022-10-23}, abstract = {Game-theoretic attribution techniques based on Shapley values are used to interpret black-box machine learning models, but their exact calculation is generally NP-hard, requiring approximation methods for non-trivial models. As the computation of Shapley values can be expressed as a summation over a set of permutations, a common approach is to sample a subset of these permutations for approximation. Unfortunately, standard Monte Carlo sampling methods can exhibit slow convergence, and more sophisticated quasi-Monte Carlo methods have not yet been applied to the space of permutations. To address this, we investigate new approaches based on two classes of approximation methods and compare them empirically. First, we demonstrate quadrature techniques in a RKHS containing functions of permutations, using the Mallows kernel in combination with kernel herding and sequential Bayesian quadrature. 
The RKHS perspective also leads to quasi-Monte Carlo type error bounds, with a tractable discrepancy measure defined on permutations. Second, we exploit connections between the hypersphere S d−2 Sd−2 and permutations to create practical algorithms for generating permutation samples with good properties. Experiments show the above techniques provide significant improvements for Shapley value estimates over existing methods, converging to a smaller RMSE in the same number of model evaluations.} } @@ -221,7 +392,7 @@ @inproceedings{okhrati_multilinear_2021 eprint = {2010.12082}, eprinttype = {arxiv}, pages = {7992--7999}, - publisher = {{IEEE}}, + publisher = {IEEE}, issn = {1051-4651}, doi = {10.1109/ICPR48806.2021.9412511}, url = {https://ieeexplore.ieee.org/abstract/document/9412511}, @@ -231,19 +402,23 @@ @inproceedings{okhrati_multilinear_2021 keywords = {notion} } -@inproceedings{schioppa_scaling_2021, +@article{schioppa_scaling_2022, title = {Scaling {{Up Influence Functions}}}, author = {Schioppa, Andrea and Zablotskaia, Polina and Vilar, David and Sokolov, Artem}, - date = {2021-12-06}, + date = {2022-06-28}, + journaltitle = {Proceedings of the AAAI Conference on Artificial Intelligence}, + shortjournal = {Proc. AAAI Conf. Artif. Intell.}, + volume = {36}, + number = {8}, eprint = {2112.03052}, eprinttype = {arxiv}, - eprintclass = {cs}, - publisher = {{arXiv}}, - doi = {10.48550/arXiv.2112.03052}, - url = {http://arxiv.org/abs/2112.03052}, - urldate = {2023-03-10}, - abstract = {We address efficient calculation of influence functions for tracking predictions back to the training data. We propose and analyze a new approach to speeding up the inverse Hessian calculation based on Arnoldi iteration. With this improvement, we achieve, to the best of our knowledge, the first successful implementation of influence functions that scales to full-size (language and vision) Transformer models with several hundreds of millions of parameters. 
We evaluate our approach on image classification and sequence-to-sequence tasks with tens to a hundred of millions of training examples. Our code will be available at https://github.com/google-research/jax-influence.}, - eventtitle = {{{AAAI-22}}}, + pages = {8179--8186}, + issn = {2374-3468}, + doi = {10.1609/aaai.v36i8.20791}, + url = {https://ojs.aaai.org/index.php/AAAI/article/view/20791}, + urldate = {2024-03-30}, + abstract = {We address efficient calculation of influence functions for tracking predictions back to the training data. We propose and analyze a new approach to speeding up the inverse Hessian calculation based on Arnoldi iteration. With this improvement, we achieve, to the best of our knowledge, the first successful implementation of influence functions that scales to full-size (language and vision) Transformer models with several hundreds of millions of parameters. We evaluate our approach in image classification and sequence-to-sequence tasks with tens to a hundred of millions of training examples. Our code is available at https://github.com/google-research/jax-influence.}, + langid = {english}, keywords = {notion} } @@ -253,7 +428,7 @@ @inproceedings{schoch_csshapley_2022 booktitle = {Proc. of the Thirty-Sixth {{Conference}} on {{Neural Information Processing Systems}} ({{NeurIPS}})}, author = {Schoch, Stephanie and Xu, Haifeng and Ji, Yangfeng}, date = {2022-10-31}, - location = {{New Orleans, Louisiana, USA}}, + location = {New Orleans, Louisiana, USA}, url = {https://openreview.net/forum?id=KTOcrOR5mQ9}, urldate = {2022-11-23}, abstract = {Data valuation, or the valuation of individual datum contributions, has seen growing interest in machine learning due to its demonstrable efficacy for tasks such as noisy label detection. In particular, due to the desirable axiomatic properties, several Shapley value approximations have been proposed. 
In these methods, the value function is usually defined as the predictive accuracy over the entire development set. However, this limits the ability to differentiate between training instances that are helpful or harmful to their own classes. Intuitively, instances that harm their own classes may be noisy or mislabeled and should receive a lower valuation than helpful instances. In this work, we propose CS-Shapley, a Shapley value with a new value function that discriminates between training instances’ in-class and out-of-class contributions. Our theoretical analysis shows the proposed value function is (essentially) the unique function that satisfies two desirable properties for evaluating data values in classification. Further, our experiments on two benchmark evaluation tasks (data removal and noisy label detection) and four classifiers demonstrate the effectiveness of CS-Shapley over existing methods. Lastly, we evaluate the “transferability” of data values estimated from one classifier to others, and our results suggest Shapley-based data valuation is transferable for application across different models.}, @@ -262,20 +437,33 @@ @inproceedings{schoch_csshapley_2022 keywords = {notion} } -@online{wang_data_2022, +@book{trefethen_numerical_1997, + title = {Numerical {{Linear Algebra}}}, + author = {Trefethen, Lloyd N. and Bau, Iii, David}, + date = {1997-01}, + publisher = {{Society for Industrial and Applied Mathematics}}, + location = {Philadelphia, PA}, + doi = {10.1137/1.9780898719574}, + url = {https://epubs.siam.org/doi/book/10.1137/1.9780898719574}, + urldate = {2024-03-19}, + isbn = {978-0-89871-361-9 978-0-89871-957-4}, + langid = {english} +} + +@inproceedings{wang_data_2022, title = {Data {{Banzhaf}}: {{A Robust Data Valuation Framework}} for {{Machine Learning}}}, shorttitle = {Data {{Banzhaf}}}, + booktitle = {Proceedings of {{The}} 26th {{International Conference}} on {{Artificial Intelligence}} and {{Statistics}}}, author = {Wang, Jiachen T. 
and Jia, Ruoxi}, - date = {2022-10-22}, - eprint = {2205.15466}, - eprinttype = {arxiv}, - eprintclass = {cs, stat}, - doi = {10.48550/arXiv.2205.15466}, - url = {http://arxiv.org/abs/2205.15466}, - urldate = {2022-10-28}, - abstract = {This paper studies the robustness of data valuation to noisy model performance scores. Particularly, we find that the inherent randomness of the widely used stochastic gradient descent can cause existing data value notions (e.g., the Shapley value and the Leave-one-out error) to produce inconsistent data value rankings across different runs. To address this challenge, we first pose a formal framework within which one can measure the robustness of a data value notion. We show that the Banzhaf value, a value notion originated from cooperative game theory literature, achieves the maximal robustness among all semivalues -- a class of value notions that satisfy crucial properties entailed by ML applications. We propose an algorithm to efficiently estimate the Banzhaf value based on the Maximum Sample Reuse (MSR) principle. We derive the lower bound sample complexity for Banzhaf value estimation, and we show that our MSR algorithm's sample complexity is close to the lower bound. Our evaluation demonstrates that the Banzhaf value outperforms the existing semivalue-based data value notions on several downstream ML tasks such as learning with weighted samples and noisy label detection. 
Overall, our study suggests that when the underlying ML algorithm is stochastic, the Banzhaf value is a promising alternative to the semivalue-based data value schemes given its computational advantage and ability to robustly differentiate data quality.}, - pubstate = {preprint}, - keywords = {notion} + date = {2023-04-11}, + pages = {6388--6421}, + publisher = {PMLR}, + issn = {2640-3498}, + url = {https://proceedings.mlr.press/v206/wang23e.html}, + urldate = {2024-02-15}, + abstract = {Data valuation has wide use cases in machine learning, including improving data quality and creating economic incentives for data sharing. This paper studies the robustness of data valuation to noisy model performance scores. Particularly, we find that the inherent randomness of the widely used stochastic gradient descent can cause existing data value notions (e.g., the Shapley value and the Leave-one-out error) to produce inconsistent data value rankings across different runs. To address this challenge, we introduce the concept of safety margin, which measures the robustness of a data value notion. We show that the Banzhaf value, a famous value notion that originated from cooperative game theory literature, achieves the largest safety margin among all semivalues (a class of value notions that satisfy crucial properties entailed by ML applications and include the famous Shapley value and Leave-one-out error). We propose an algorithm to efficiently estimate the Banzhaf value based on the Maximum Sample Reuse (MSR) principle. Our evaluation demonstrates that the Banzhaf value outperforms the existing semivalue-based data value notions on several ML tasks such as learning with weighted samples and noisy label detection. 
Overall, our study suggests that when the underlying ML algorithm is stochastic, the Banzhaf value is a promising alternative to the other semivalue-based data value schemes given its computational advantage and ability to robustly differentiate data quality.}, + eventtitle = {International {{Conference}} on {{Artificial Intelligence}} and {{Statistics}}}, + langid = {english} } @inproceedings{wang_improving_2022, @@ -284,9 +472,9 @@ @inproceedings{wang_improving_2022 date = {2022-04-07}, eprint = {2107.06336v2}, eprinttype = {arxiv}, - publisher = {{arXiv}}, + publisher = {arXiv}, doi = {10.48550/arXiv.2107.06336}, - url = {http://arxiv.org/abs/2107.06336v2}, + url = {https://arxiv.org/abs/2107.06336v2}, urldate = {2022-05-19}, abstract = {The Shapley value (SV) and Least core (LC) are classic methods in cooperative game theory for cost/profit sharing problems. Both methods have recently been proposed as a principled solution for data valuation tasks, i.e., quantifying the contribution of individual datum in machine learning. However, both SV and LC suffer computational challenges due to the need for retraining models on combinatorially many data subsets. In this work, we propose to boost the efficiency in computing Shapley value or Least core by learning to estimate the performance of a learning algorithm on unseen data combinations. Theoretically, we derive bounds relating the error in the predicted learning performance to the approximation error in SV and LC. Empirically, we show that the proposed method can significantly improve the accuracy of SV and LC estimation.}, eventtitle = {International {{Conference}} on {{Learning Representations}} ({{ICLR}} 2022). 
{{Workshop}} on {{Socially Responsible Machine Learning}}}, @@ -294,6 +482,20 @@ @inproceedings{wang_improving_2022 keywords = {notion} } +@online{watson_accelerated_2023, + title = {Accelerated {{Shapley Value Approximation}} for {{Data Evaluation}}}, + author = {Watson, Lauren and Kujawa, Zeno and Andreeva, Rayna and Yang, Hao-Tsung and Elahi, Tariq and Sarkar, Rik}, + date = {2023-11-09}, + eprint = {2311.05346}, + eprinttype = {arxiv}, + eprintclass = {cs}, + doi = {10.48550/arXiv.2311.05346}, + url = {https://arxiv.org/abs/2311.05346}, + urldate = {2023-12-07}, + abstract = {Data valuation has found various applications in machine learning, such as data filtering, efficient learning and incentives for data sharing. The most popular current approach to data valuation is the Shapley value. While popular for its various applications, Shapley value is computationally expensive even to approximate, as it requires repeated iterations of training models on different subsets of data. In this paper we show that the Shapley value of data points can be approximated more efficiently by leveraging the structural properties of machine learning problems. We derive convergence guarantees on the accuracy of the approximate Shapley value for different learning settings including Stochastic Gradient Descent with convex and non-convex loss functions. Our analysis suggests that in fact models trained on small subsets are more important in the context of data valuation. Based on this idea, we describe \$\textbackslash delta\$-Shapley -- a strategy of only using small subsets for the approximation. Experiments show that this approach preserves approximate value and rank of data, while achieving speedup of up to 9.9x. 
In pre-trained networks the approach is found to bring more efficiency in terms of accurate evaluation using small subsets.}, + pubstate = {preprint} +} + @inproceedings{wu_davinz_2022, title = {{{DAVINZ}}: {{Data Valuation}} Using {{Deep Neural Networks}} at {{Initialization}}}, shorttitle = {{{DAVINZ}}}, @@ -301,7 +503,7 @@ @inproceedings{wu_davinz_2022 author = {Wu, Zhaoxuan and Shu, Yao and Low, Bryan Kian Hsiang}, date = {2022-06-28}, pages = {24150--24176}, - publisher = {{PMLR}}, + publisher = {PMLR}, url = {https://proceedings.mlr.press/v162/wu22j.html}, urldate = {2022-10-29}, abstract = {Recent years have witnessed a surge of interest in developing trustworthy methods to evaluate the value of data in many real-world applications (e.g., collaborative machine learning, data marketplaces). Existing data valuation methods typically valuate data using the generalization performance of converged machine learning models after their long-term model training, hence making data valuation on large complex deep neural networks (DNNs) unaffordable. To this end, we theoretically derive a domain-aware generalization bound to estimate the generalization performance of DNNs without model training. We then exploit this theoretically derived generalization bound to develop a novel training-free data valuation method named data valuation at initialization (DAVINZ) on DNNs, which consistently achieves remarkable effectiveness and efficiency in practice. 
Moreover, our training-free DAVINZ, surprisingly, can even theoretically and empirically enjoy the desirable properties that training-based data valuation methods usually attain, thus making it more trustworthy in practice.}, @@ -317,8 +519,8 @@ @inproceedings{yan_if_2021 date = {2021-05-18}, volume = {6}, pages = {5751--5759}, - publisher = {{Association for the Advancement of Artificial Intelligence}}, - location = {{Virtual conference}}, + publisher = {Association for the Advancement of Artificial Intelligence}, + location = {Virtual conference}, doi = {10.1609/aaai.v35i6.16721}, url = {https://ojs.aaai.org/index.php/AAAI/article/view/16721}, urldate = {2021-04-23}, @@ -327,36 +529,3 @@ @inproceedings{yan_if_2021 langid = {english}, keywords = {notion} } - -@InProceedings{kwon_data_2023, - title = {Data-{OOB}: Out-of-bag Estimate as a Simple and Efficient Data Value}, - author = {Kwon, Yongchan and Zou, James}, - booktitle = {Proceedings of the 40th International Conference on Machine Learning}, - pages = {18135--18152}, - year = {2023}, - editor = {Krause, Andreas and Brunskill, Emma and Cho, Kyunghyun and Engelhardt, Barbara and Sabato, Sivan and Scarlett, Jonathan}, - volume = {202}, - series = {Proceedings of Machine Learning Research}, - month = {23--29 Jul}, - publisher = {PMLR}, - pdf = {https://proceedings.mlr.press/v202/kwon23e/kwon23e.pdf}, - url = {https://proceedings.mlr.press/v202/kwon23e.html}, - abstract = {Data valuation is a powerful framework for providing statistical insights into which data are beneficial or detrimental to model training. Many Shapley-based data valuation methods have shown promising results in various downstream tasks, however, they are well known to be computationally challenging as it requires training a large number of models. As a result, it has been recognized as infeasible to apply to large datasets. 
To address this issue, we propose Data-OOB, a new data valuation method for a bagging model that utilizes the out-of-bag estimate. The proposed method is computationally efficient and can scale to millions of data by reusing trained weak learners. Specifically, Data-OOB takes less than $2.25$ hours on a single CPU processor when there are $10^6$ samples to evaluate and the input dimension is $100$. Furthermore, Data-OOB has solid theoretical interpretations in that it identifies the same important data point as the infinitesimal jackknife influence function when two different points are compared. We conduct comprehensive experiments using 12 classification datasets, each with thousands of sample sizes. We demonstrate that the proposed method significantly outperforms existing state-of-the-art data valuation methods in identifying mislabeled data and finding a set of helpful (or harmful) data points, highlighting the potential for applying data values in real-world applications.} -} - -@article{george2018fast, - title={Fast approximate natural gradient descent in a kronecker factored eigenbasis}, - author={George, Thomas and Laurent, C{\'e}sar and Bouthillier, Xavier and Ballas, Nicolas and Vincent, Pascal}, - journal={Advances in Neural Information Processing Systems}, - volume={31}, - year={2018} -} - -@inproceedings{martens2015optimizing, - title={Optimizing neural networks with kronecker-factored approximate curvature}, - author={Martens, James and Grosse, Roger}, - booktitle={International conference on machine learning}, - pages={2408--2417}, - year={2015}, - organization={PMLR} -} \ No newline at end of file diff --git a/docs/assets/signet.svg b/docs/assets/signet.svg index 068ac68e8..79cbfd92b 100644 --- a/docs/assets/signet.svg +++ b/docs/assets/signet.svg @@ -1 +1,3 @@ - \ No newline at end of file +version https://git-lfs.github.com/spec/v1 +oid sha256:872cdfe777cd36ba1fb70d95bfec149b3f8df7374aa4bab9001ebf68ecbef975 +size 1720 diff --git 
a/docs/css/extra.css b/docs/css/extra.css index 0a74470ce..4354e03e9 100644 --- a/docs/css/extra.css +++ b/docs/css/extra.css @@ -69,6 +69,7 @@ a.autorefs-external:hover::after { .nt-card-image:focus { filter: invert(32%) sepia(93%) saturate(1535%) hue-rotate(220deg) brightness(102%) contrast(99%); } + .md-header__button.md-logo { padding: 0; } diff --git a/docs/css/grid-cards.css b/docs/css/grid-cards.css new file mode 100644 index 000000000..8be3dcbb8 --- /dev/null +++ b/docs/css/grid-cards.css @@ -0,0 +1,22 @@ +/* Shadow and Hover */ +.grid.cards > ul > li { + box-shadow: 0 2px 2px 0 rgb(0 0 0 / 14%), 0 3px 1px -2px rgb(0 0 0 / 20%), 0 1px 5px 0 rgb(0 0 0 / 12%); + + &:hover { + transform: scale(1.05); + z-index: 999; + background-color: rgba(0, 0, 0, 0.05); + } +} + +[data-md-color-scheme="slate"] { + .grid.cards > ul > li { + box-shadow: 0 2px 2px 0 rgb(4 40 33 / 14%), 0 3px 1px -2px rgb(40 86 94 / 47%), 0 1px 5px 0 rgb(139 252 255 / 64%); + + &:hover { + transform: scale(1.05); + z-index: 999; + background-color: rgba(139, 252, 255, 0.05); + } + } +} diff --git a/docs/css/neoteroi.css b/docs/css/neoteroi.css deleted file mode 100644 index 363c9229a..000000000 --- a/docs/css/neoteroi.css +++ /dev/null @@ -1 +0,0 @@ -:root{--nt-color-0: #CD853F;--nt-color-1: #B22222;--nt-color-2: #000080;--nt-color-3: #4B0082;--nt-color-4: #3CB371;--nt-color-5: #D2B48C;--nt-color-6: #FF00FF;--nt-color-7: #98FB98;--nt-color-8: #FFEBCD;--nt-color-9: #2E8B57;--nt-color-10: #6A5ACD;--nt-color-11: #48D1CC;--nt-color-12: #FFA500;--nt-color-13: #F4A460;--nt-color-14: #A52A2A;--nt-color-15: #FFE4C4;--nt-color-16: #FF4500;--nt-color-17: #AFEEEE;--nt-color-18: #FA8072;--nt-color-19: #2F4F4F;--nt-color-20: #FFDAB9;--nt-color-21: #BC8F8F;--nt-color-22: #FFC0CB;--nt-color-23: #00FA9A;--nt-color-24: #F0FFF0;--nt-color-25: #FFFACD;--nt-color-26: #F5F5F5;--nt-color-27: #FF6347;--nt-color-28: #FFFFF0;--nt-color-29: #7FFFD4;--nt-color-30: #E9967A;--nt-color-31: #7B68EE;--nt-color-32: 
#FFF8DC;--nt-color-33: #0000CD;--nt-color-34: #D2691E;--nt-color-35: #708090;--nt-color-36: #5F9EA0;--nt-color-37: #008080;--nt-color-38: #008000;--nt-color-39: #FFE4E1;--nt-color-40: #FFFF00;--nt-color-41: #FFFAF0;--nt-color-42: #DCDCDC;--nt-color-43: #ADFF2F;--nt-color-44: #ADD8E6;--nt-color-45: #8B008B;--nt-color-46: #7FFF00;--nt-color-47: #800000;--nt-color-48: #20B2AA;--nt-color-49: #556B2F;--nt-color-50: #778899;--nt-color-51: #E6E6FA;--nt-color-52: #FFFAFA;--nt-color-53: #FF7F50;--nt-color-54: #FF0000;--nt-color-55: #F5DEB3;--nt-color-56: #008B8B;--nt-color-57: #66CDAA;--nt-color-58: #808000;--nt-color-59: #FAF0E6;--nt-color-60: #00BFFF;--nt-color-61: #C71585;--nt-color-62: #00FFFF;--nt-color-63: #8B4513;--nt-color-64: #F0F8FF;--nt-color-65: #FAEBD7;--nt-color-66: #8B0000;--nt-color-67: #4682B4;--nt-color-68: #F0E68C;--nt-color-69: #BDB76B;--nt-color-70: #A0522D;--nt-color-71: #FAFAD2;--nt-color-72: #FFD700;--nt-color-73: #DEB887;--nt-color-74: #E0FFFF;--nt-color-75: #8A2BE2;--nt-color-76: #32CD32;--nt-color-77: #87CEFA;--nt-color-78: #00CED1;--nt-color-79: #696969;--nt-color-80: #DDA0DD;--nt-color-81: #EE82EE;--nt-color-82: #FFB6C1;--nt-color-83: #8FBC8F;--nt-color-84: #D8BFD8;--nt-color-85: #9400D3;--nt-color-86: #A9A9A9;--nt-color-87: #FFFFE0;--nt-color-88: #FFF5EE;--nt-color-89: #FFF0F5;--nt-color-90: #FFDEAD;--nt-color-91: #800080;--nt-color-92: #B0E0E6;--nt-color-93: #9932CC;--nt-color-94: #DAA520;--nt-color-95: #F0FFFF;--nt-color-96: #40E0D0;--nt-color-97: #00FF7F;--nt-color-98: #006400;--nt-color-99: #808080;--nt-color-100: #87CEEB;--nt-color-101: #0000FF;--nt-color-102: #6495ED;--nt-color-103: #FDF5E6;--nt-color-104: #B8860B;--nt-color-105: #BA55D3;--nt-color-106: #C0C0C0;--nt-color-107: #000000;--nt-color-108: #F08080;--nt-color-109: #B0C4DE;--nt-color-110: #00008B;--nt-color-111: #6B8E23;--nt-color-112: #FFE4B5;--nt-color-113: #FFA07A;--nt-color-114: #9ACD32;--nt-color-115: #FFFFFF;--nt-color-116: #F5F5DC;--nt-color-117: #90EE90;--nt-color-118: 
#1E90FF;--nt-color-119: #7CFC00;--nt-color-120: #FF69B4;--nt-color-121: #F8F8FF;--nt-color-122: #F5FFFA;--nt-color-123: #00FF00;--nt-color-124: #D3D3D3;--nt-color-125: #DB7093;--nt-color-126: #DA70D6;--nt-color-127: #FF1493;--nt-color-128: #228B22;--nt-color-129: #FFEFD5;--nt-color-130: #4169E1;--nt-color-131: #191970;--nt-color-132: #9370DB;--nt-color-133: #483D8B;--nt-color-134: #FF8C00;--nt-color-135: #EEE8AA;--nt-color-136: #CD5C5C;--nt-color-137: #DC143C}:root{--nt-group-0-main: #000000;--nt-group-0-dark: #FFFFFF;--nt-group-0-light: #000000;--nt-group-0-main-bg: #F44336;--nt-group-0-dark-bg: #BA000D;--nt-group-0-light-bg: #FF7961;--nt-group-1-main: #000000;--nt-group-1-dark: #FFFFFF;--nt-group-1-light: #000000;--nt-group-1-main-bg: #E91E63;--nt-group-1-dark-bg: #B0003A;--nt-group-1-light-bg: #FF6090;--nt-group-2-main: #FFFFFF;--nt-group-2-dark: #FFFFFF;--nt-group-2-light: #000000;--nt-group-2-main-bg: #9C27B0;--nt-group-2-dark-bg: #6A0080;--nt-group-2-light-bg: #D05CE3;--nt-group-3-main: #FFFFFF;--nt-group-3-dark: #FFFFFF;--nt-group-3-light: #000000;--nt-group-3-main-bg: #673AB7;--nt-group-3-dark-bg: #320B86;--nt-group-3-light-bg: #9A67EA;--nt-group-4-main: #FFFFFF;--nt-group-4-dark: #FFFFFF;--nt-group-4-light: #000000;--nt-group-4-main-bg: #3F51B5;--nt-group-4-dark-bg: #002984;--nt-group-4-light-bg: #757DE8;--nt-group-5-main: #000000;--nt-group-5-dark: #FFFFFF;--nt-group-5-light: #000000;--nt-group-5-main-bg: #2196F3;--nt-group-5-dark-bg: #0069C0;--nt-group-5-light-bg: #6EC6FF;--nt-group-6-main: #000000;--nt-group-6-dark: #FFFFFF;--nt-group-6-light: #000000;--nt-group-6-main-bg: #03A9F4;--nt-group-6-dark-bg: #007AC1;--nt-group-6-light-bg: #67DAFF;--nt-group-7-main: #000000;--nt-group-7-dark: #000000;--nt-group-7-light: #000000;--nt-group-7-main-bg: #00BCD4;--nt-group-7-dark-bg: #008BA3;--nt-group-7-light-bg: #62EFFF;--nt-group-8-main: #000000;--nt-group-8-dark: #FFFFFF;--nt-group-8-light: #000000;--nt-group-8-main-bg: #009688;--nt-group-8-dark-bg: 
#00675B;--nt-group-8-light-bg: #52C7B8;--nt-group-9-main: #000000;--nt-group-9-dark: #FFFFFF;--nt-group-9-light: #000000;--nt-group-9-main-bg: #4CAF50;--nt-group-9-dark-bg: #087F23;--nt-group-9-light-bg: #80E27E;--nt-group-10-main: #000000;--nt-group-10-dark: #000000;--nt-group-10-light: #000000;--nt-group-10-main-bg: #8BC34A;--nt-group-10-dark-bg: #5A9216;--nt-group-10-light-bg: #BEF67A;--nt-group-11-main: #000000;--nt-group-11-dark: #000000;--nt-group-11-light: #000000;--nt-group-11-main-bg: #CDDC39;--nt-group-11-dark-bg: #99AA00;--nt-group-11-light-bg: #FFFF6E;--nt-group-12-main: #000000;--nt-group-12-dark: #000000;--nt-group-12-light: #000000;--nt-group-12-main-bg: #FFEB3B;--nt-group-12-dark-bg: #C8B900;--nt-group-12-light-bg: #FFFF72;--nt-group-13-main: #000000;--nt-group-13-dark: #000000;--nt-group-13-light: #000000;--nt-group-13-main-bg: #FFC107;--nt-group-13-dark-bg: #C79100;--nt-group-13-light-bg: #FFF350;--nt-group-14-main: #000000;--nt-group-14-dark: #000000;--nt-group-14-light: #000000;--nt-group-14-main-bg: #FF9800;--nt-group-14-dark-bg: #C66900;--nt-group-14-light-bg: #FFC947;--nt-group-15-main: #000000;--nt-group-15-dark: #FFFFFF;--nt-group-15-light: #000000;--nt-group-15-main-bg: #FF5722;--nt-group-15-dark-bg: #C41C00;--nt-group-15-light-bg: #FF8A50;--nt-group-16-main: #FFFFFF;--nt-group-16-dark: #FFFFFF;--nt-group-16-light: #000000;--nt-group-16-main-bg: #795548;--nt-group-16-dark-bg: #4B2C20;--nt-group-16-light-bg: #A98274;--nt-group-17-main: #000000;--nt-group-17-dark: #FFFFFF;--nt-group-17-light: #000000;--nt-group-17-main-bg: #9E9E9E;--nt-group-17-dark-bg: #707070;--nt-group-17-light-bg: #CFCFCF;--nt-group-18-main: #000000;--nt-group-18-dark: #FFFFFF;--nt-group-18-light: #000000;--nt-group-18-main-bg: #607D8B;--nt-group-18-dark-bg: #34515E;--nt-group-18-light-bg: #8EACBB}.nt-pastello{--nt-group-0-main: #000000;--nt-group-0-dark: #000000;--nt-group-0-light: #000000;--nt-group-0-main-bg: #EF9A9A;--nt-group-0-dark-bg: 
#BA6B6C;--nt-group-0-light-bg: #FFCCCB;--nt-group-1-main: #000000;--nt-group-1-dark: #000000;--nt-group-1-light: #000000;--nt-group-1-main-bg: #F48FB1;--nt-group-1-dark-bg: #BF5F82;--nt-group-1-light-bg: #FFC1E3;--nt-group-2-main: #000000;--nt-group-2-dark: #000000;--nt-group-2-light: #000000;--nt-group-2-main-bg: #CE93D8;--nt-group-2-dark-bg: #9C64A6;--nt-group-2-light-bg: #FFC4FF;--nt-group-3-main: #000000;--nt-group-3-dark: #000000;--nt-group-3-light: #000000;--nt-group-3-main-bg: #B39DDB;--nt-group-3-dark-bg: #836FA9;--nt-group-3-light-bg: #E6CEFF;--nt-group-4-main: #000000;--nt-group-4-dark: #000000;--nt-group-4-light: #000000;--nt-group-4-main-bg: #9FA8DA;--nt-group-4-dark-bg: #6F79A8;--nt-group-4-light-bg: #D1D9FF;--nt-group-5-main: #000000;--nt-group-5-dark: #000000;--nt-group-5-light: #000000;--nt-group-5-main-bg: #90CAF9;--nt-group-5-dark-bg: #5D99C6;--nt-group-5-light-bg: #C3FDFF;--nt-group-6-main: #000000;--nt-group-6-dark: #000000;--nt-group-6-light: #000000;--nt-group-6-main-bg: #81D4FA;--nt-group-6-dark-bg: #4BA3C7;--nt-group-6-light-bg: #B6FFFF;--nt-group-7-main: #000000;--nt-group-7-dark: #000000;--nt-group-7-light: #000000;--nt-group-7-main-bg: #80DEEA;--nt-group-7-dark-bg: #4BACB8;--nt-group-7-light-bg: #B4FFFF;--nt-group-8-main: #000000;--nt-group-8-dark: #000000;--nt-group-8-light: #000000;--nt-group-8-main-bg: #80CBC4;--nt-group-8-dark-bg: #4F9A94;--nt-group-8-light-bg: #B2FEF7;--nt-group-9-main: #000000;--nt-group-9-dark: #000000;--nt-group-9-light: #000000;--nt-group-9-main-bg: #A5D6A7;--nt-group-9-dark-bg: #75A478;--nt-group-9-light-bg: #D7FFD9;--nt-group-10-main: #000000;--nt-group-10-dark: #000000;--nt-group-10-light: #000000;--nt-group-10-main-bg: #C5E1A5;--nt-group-10-dark-bg: #94AF76;--nt-group-10-light-bg: #F8FFD7;--nt-group-11-main: #000000;--nt-group-11-dark: #000000;--nt-group-11-light: #000000;--nt-group-11-main-bg: #E6EE9C;--nt-group-11-dark-bg: #B3BC6D;--nt-group-11-light-bg: #FFFFCE;--nt-group-12-main: 
#000000;--nt-group-12-dark: #000000;--nt-group-12-light: #000000;--nt-group-12-main-bg: #FFF59D;--nt-group-12-dark-bg: #CBC26D;--nt-group-12-light-bg: #FFFFCF;--nt-group-13-main: #000000;--nt-group-13-dark: #000000;--nt-group-13-light: #000000;--nt-group-13-main-bg: #FFE082;--nt-group-13-dark-bg: #CAAE53;--nt-group-13-light-bg: #FFFFB3;--nt-group-14-main: #000000;--nt-group-14-dark: #000000;--nt-group-14-light: #000000;--nt-group-14-main-bg: #FFCC80;--nt-group-14-dark-bg: #CA9B52;--nt-group-14-light-bg: #FFFFB0;--nt-group-15-main: #000000;--nt-group-15-dark: #000000;--nt-group-15-light: #000000;--nt-group-15-main-bg: #FFAB91;--nt-group-15-dark-bg: #C97B63;--nt-group-15-light-bg: #FFDDC1;--nt-group-16-main: #000000;--nt-group-16-dark: #000000;--nt-group-16-light: #000000;--nt-group-16-main-bg: #BCAAA4;--nt-group-16-dark-bg: #8C7B75;--nt-group-16-light-bg: #EFDCD5;--nt-group-17-main: #000000;--nt-group-17-dark: #000000;--nt-group-17-light: #000000;--nt-group-17-main-bg: #EEEEEE;--nt-group-17-dark-bg: #BCBCBC;--nt-group-17-light-bg: #FFFFFF;--nt-group-18-main: #000000;--nt-group-18-dark: #000000;--nt-group-18-light: #000000;--nt-group-18-main-bg: #B0BEC5;--nt-group-18-dark-bg: #808E95;--nt-group-18-light-bg: #E2F1F8}.nt-group-0 .nt-plan-group-summary,.nt-group-0 .nt-timeline-dot{color:var(--nt-group-0-dark);background-color:var(--nt-group-0-dark-bg)}.nt-group-0 .period{color:var(--nt-group-0-main);background-color:var(--nt-group-0-main-bg)}.nt-group-1 .nt-plan-group-summary,.nt-group-1 .nt-timeline-dot{color:var(--nt-group-1-dark);background-color:var(--nt-group-1-dark-bg)}.nt-group-1 .period{color:var(--nt-group-1-main);background-color:var(--nt-group-1-main-bg)}.nt-group-2 .nt-plan-group-summary,.nt-group-2 .nt-timeline-dot{color:var(--nt-group-2-dark);background-color:var(--nt-group-2-dark-bg)}.nt-group-2 .period{color:var(--nt-group-2-main);background-color:var(--nt-group-2-main-bg)}.nt-group-3 .nt-plan-group-summary,.nt-group-3 
.nt-timeline-dot{color:var(--nt-group-3-dark);background-color:var(--nt-group-3-dark-bg)}.nt-group-3 .period{color:var(--nt-group-3-main);background-color:var(--nt-group-3-main-bg)}.nt-group-4 .nt-plan-group-summary,.nt-group-4 .nt-timeline-dot{color:var(--nt-group-4-dark);background-color:var(--nt-group-4-dark-bg)}.nt-group-4 .period{color:var(--nt-group-4-main);background-color:var(--nt-group-4-main-bg)}.nt-group-5 .nt-plan-group-summary,.nt-group-5 .nt-timeline-dot{color:var(--nt-group-5-dark);background-color:var(--nt-group-5-dark-bg)}.nt-group-5 .period{color:var(--nt-group-5-main);background-color:var(--nt-group-5-main-bg)}.nt-group-6 .nt-plan-group-summary,.nt-group-6 .nt-timeline-dot{color:var(--nt-group-6-dark);background-color:var(--nt-group-6-dark-bg)}.nt-group-6 .period{color:var(--nt-group-6-main);background-color:var(--nt-group-6-main-bg)}.nt-group-7 .nt-plan-group-summary,.nt-group-7 .nt-timeline-dot{color:var(--nt-group-7-dark);background-color:var(--nt-group-7-dark-bg)}.nt-group-7 .period{color:var(--nt-group-7-main);background-color:var(--nt-group-7-main-bg)}.nt-group-8 .nt-plan-group-summary,.nt-group-8 .nt-timeline-dot{color:var(--nt-group-8-dark);background-color:var(--nt-group-8-dark-bg)}.nt-group-8 .period{color:var(--nt-group-8-main);background-color:var(--nt-group-8-main-bg)}.nt-group-9 .nt-plan-group-summary,.nt-group-9 .nt-timeline-dot{color:var(--nt-group-9-dark);background-color:var(--nt-group-9-dark-bg)}.nt-group-9 .period{color:var(--nt-group-9-main);background-color:var(--nt-group-9-main-bg)}.nt-group-10 .nt-plan-group-summary,.nt-group-10 .nt-timeline-dot{color:var(--nt-group-10-dark);background-color:var(--nt-group-10-dark-bg)}.nt-group-10 .period{color:var(--nt-group-10-main);background-color:var(--nt-group-10-main-bg)}.nt-group-11 .nt-plan-group-summary,.nt-group-11 .nt-timeline-dot{color:var(--nt-group-11-dark);background-color:var(--nt-group-11-dark-bg)}.nt-group-11 
.period{color:var(--nt-group-11-main);background-color:var(--nt-group-11-main-bg)}.nt-group-12 .nt-plan-group-summary,.nt-group-12 .nt-timeline-dot{color:var(--nt-group-12-dark);background-color:var(--nt-group-12-dark-bg)}.nt-group-12 .period{color:var(--nt-group-12-main);background-color:var(--nt-group-12-main-bg)}.nt-group-13 .nt-plan-group-summary,.nt-group-13 .nt-timeline-dot{color:var(--nt-group-13-dark);background-color:var(--nt-group-13-dark-bg)}.nt-group-13 .period{color:var(--nt-group-13-main);background-color:var(--nt-group-13-main-bg)}.nt-group-14 .nt-plan-group-summary,.nt-group-14 .nt-timeline-dot{color:var(--nt-group-14-dark);background-color:var(--nt-group-14-dark-bg)}.nt-group-14 .period{color:var(--nt-group-14-main);background-color:var(--nt-group-14-main-bg)}.nt-group-15 .nt-plan-group-summary,.nt-group-15 .nt-timeline-dot{color:var(--nt-group-15-dark);background-color:var(--nt-group-15-dark-bg)}.nt-group-15 .period{color:var(--nt-group-15-main);background-color:var(--nt-group-15-main-bg)}.nt-group-16 .nt-plan-group-summary,.nt-group-16 .nt-timeline-dot{color:var(--nt-group-16-dark);background-color:var(--nt-group-16-dark-bg)}.nt-group-16 .period{color:var(--nt-group-16-main);background-color:var(--nt-group-16-main-bg)}.nt-group-17 .nt-plan-group-summary,.nt-group-17 .nt-timeline-dot{color:var(--nt-group-17-dark);background-color:var(--nt-group-17-dark-bg)}.nt-group-17 .period{color:var(--nt-group-17-main);background-color:var(--nt-group-17-main-bg)}.nt-group-18 .nt-plan-group-summary,.nt-group-18 .nt-timeline-dot{color:var(--nt-group-18-dark);background-color:var(--nt-group-18-dark-bg)}.nt-group-18 .period{color:var(--nt-group-18-main);background-color:var(--nt-group-18-main-bg)}.nt-error{border:2px dashed darkred;padding:0 1rem;background:#faf9ba;color:darkred}.nt-timeline{margin-top:30px}.nt-timeline .nt-timeline-title{font-size:1.1rem;margin-top:0}.nt-timeline .nt-timeline-sub-title{margin-top:0}.nt-timeline 
.nt-timeline-content{font-size:.8rem;border-bottom:2px dashed #ccc;padding-bottom:1.2rem}.nt-timeline.horizontal .nt-timeline-items{flex-direction:row;overflow-x:scroll}.nt-timeline.horizontal .nt-timeline-items>div{min-width:400px;margin-right:50px}.nt-timeline.horizontal.reverse .nt-timeline-items{flex-direction:row-reverse}.nt-timeline.horizontal.center .nt-timeline-before{background-image:linear-gradient(rgba(252, 70, 107, 0) 0%, rgb(252, 70, 107) 100%);background-repeat:no-repeat;background-size:100% 2px;background-position:0 center}.nt-timeline.horizontal.center .nt-timeline-after{background-image:linear-gradient(180deg, rgb(252, 70, 107) 0%, rgba(252, 70, 107, 0) 100%);background-repeat:no-repeat;background-size:100% 2px;background-position:0 center}.nt-timeline.horizontal.center .nt-timeline-items{background-image:radial-gradient(circle, rgb(63, 94, 251) 0%, rgb(252, 70, 107) 100%);background-repeat:no-repeat;background-size:100% 2px;background-position:0 center}.nt-timeline.horizontal .nt-timeline-dot{left:50%}.nt-timeline.horizontal .nt-timeline-dot:not(.bigger){top:calc(50% - 4px)}.nt-timeline.horizontal .nt-timeline-dot.bigger{top:calc(50% - 15px)}.nt-timeline.vertical .nt-timeline-items{flex-direction:column}.nt-timeline.vertical.reverse .nt-timeline-items{flex-direction:column-reverse}.nt-timeline.vertical.center .nt-timeline-before{background:linear-gradient(rgba(252, 70, 107, 0) 0%, rgb(252, 70, 107) 100%) no-repeat center/2px 100%}.nt-timeline.vertical.center .nt-timeline-after{background:linear-gradient(rgb(252, 70, 107) 0%, rgba(252, 70, 107, 0) 100%) no-repeat center/2px 100%}.nt-timeline.vertical.center .nt-timeline-items{background:radial-gradient(circle, rgb(63, 94, 251) 0%, rgb(252, 70, 107) 100%) no-repeat center/2px 100%}.nt-timeline.vertical.center .nt-timeline-dot{left:calc(50% - 10px)}.nt-timeline.vertical.center .nt-timeline-dot:not(.bigger){top:10px}.nt-timeline.vertical.center .nt-timeline-dot.bigger{left:calc(50% - 
20px)}.nt-timeline.vertical.left{padding-left:100px}.nt-timeline.vertical.left .nt-timeline-item{padding-left:70px}.nt-timeline.vertical.left .nt-timeline-sub-title{left:-100px;width:100px}.nt-timeline.vertical.left .nt-timeline-before{background:linear-gradient(rgba(252, 70, 107, 0) 0%, rgb(252, 70, 107) 100%) no-repeat 30px/2px 100%}.nt-timeline.vertical.left .nt-timeline-after{background:linear-gradient(rgb(252, 70, 107) 0%, rgba(252, 70, 107, 0) 100%) no-repeat 30px/2px 100%}.nt-timeline.vertical.left .nt-timeline-items{background:radial-gradient(circle, rgb(63, 94, 251) 0%, rgb(252, 70, 107) 100%) no-repeat 30px/2px 100%}.nt-timeline.vertical.left .nt-timeline-dot{left:21px;top:8px}.nt-timeline.vertical.left .nt-timeline-dot.bigger{top:0px;left:10px}.nt-timeline.vertical.right{padding-right:100px}.nt-timeline.vertical.right .nt-timeline-sub-title{right:-100px;text-align:left;width:100px}.nt-timeline.vertical.right .nt-timeline-item{padding-right:70px}.nt-timeline.vertical.right .nt-timeline-before{background:linear-gradient(rgba(252, 70, 107, 0) 0%, rgb(252, 70, 107) 100%) no-repeat calc(100% - 30px)/2px 100%}.nt-timeline.vertical.right .nt-timeline-after{background:linear-gradient(rgb(252, 70, 107) 0%, rgba(252, 70, 107, 0) 100%) no-repeat calc(100% - 30px)/2px 100%}.nt-timeline.vertical.right .nt-timeline-items{background:radial-gradient(circle, rgb(63, 94, 251) 0%, rgb(252, 70, 107) 100%) no-repeat calc(100% - 30px)/2px 100%}.nt-timeline.vertical.right .nt-timeline-dot{right:21px;top:8px}.nt-timeline.vertical.right .nt-timeline-dot.bigger{top:10px;right:10px}.nt-timeline-items{display:flex;position:relative}.nt-timeline-items>div{min-height:100px;padding-top:2px;padding-bottom:20px}.nt-timeline-before{content:"";height:15px}.nt-timeline-after{content:"";height:60px;margin-bottom:20px}.nt-timeline-sub-title{position:absolute;width:50%;top:4px;font-size:18px;color:var(--nt-color-50)}[data-md-color-scheme=slate] 
.nt-timeline-sub-title{color:var(--nt-color-51)}.nt-timeline-item{position:relative}.nt-timeline.vertical.center:not(.alternate) .nt-timeline-item{padding-left:calc(50% + 40px)}.nt-timeline.vertical.center:not(.alternate) .nt-timeline-item .nt-timeline-sub-title{left:0;padding-right:40px;text-align:right}.nt-timeline.vertical.center.alternate .nt-timeline-item:nth-child(odd){padding-left:calc(50% + 40px)}.nt-timeline.vertical.center.alternate .nt-timeline-item:nth-child(odd) .nt-timeline-sub-title{left:0;padding-right:40px;text-align:right}.nt-timeline.vertical.center.alternate .nt-timeline-item:nth-child(even){text-align:right;padding-right:calc(50% + 40px)}.nt-timeline.vertical.center.alternate .nt-timeline-item:nth-child(even) .nt-timeline-sub-title{right:0;padding-left:40px;text-align:left}.nt-timeline-dot{position:relative;width:20px;height:20px;border-radius:100%;background-color:#fc5b5b;position:absolute;top:0px;z-index:2;display:flex;justify-content:center;align-items:center;box-shadow:0 2px 1px -1px rgba(0,0,0,.2),0 1px 1px 0 rgba(0,0,0,.14),0 1px 3px 0 rgba(0,0,0,.12);border:3px solid #fff}.nt-timeline-dot:not(.bigger) .icon{font-size:10px}.nt-timeline-dot.bigger{width:40px;height:40px;padding:3px}.nt-timeline-dot .icon{color:#fff}@supports not (-moz-appearance: none){details .nt-timeline.vertical.center.alternate .nt-timeline-item:nth-child(odd) .nt-timeline-sub-title,details .nt-timeline.vertical.center:not(.alternate) .nt-timeline-item .nt-timeline-sub-title{left:-40px}details .nt-timeline.vertical.center.alternate .nt-timeline-item:nth-child(even) .nt-timeline-sub-title{right:-40px}details .nt-timeline.vertical.center .nt-timeline-dot{left:calc(50% - 12px)}details .nt-timeline-dot.bigger{font-size:1rem !important}}.nt-timeline-item:nth-child(0) .nt-timeline-dot{background-color:var(--nt-color-0)}.nt-timeline-item:nth-child(1) .nt-timeline-dot{background-color:var(--nt-color-1)}.nt-timeline-item:nth-child(2) 
.nt-timeline-dot{background-color:var(--nt-color-2)}.nt-timeline-item:nth-child(3) .nt-timeline-dot{background-color:var(--nt-color-3)}.nt-timeline-item:nth-child(4) .nt-timeline-dot{background-color:var(--nt-color-4)}.nt-timeline-item:nth-child(5) .nt-timeline-dot{background-color:var(--nt-color-5)}.nt-timeline-item:nth-child(6) .nt-timeline-dot{background-color:var(--nt-color-6)}.nt-timeline-item:nth-child(7) .nt-timeline-dot{background-color:var(--nt-color-7)}.nt-timeline-item:nth-child(8) .nt-timeline-dot{background-color:var(--nt-color-8)}.nt-timeline-item:nth-child(9) .nt-timeline-dot{background-color:var(--nt-color-9)}.nt-timeline-item:nth-child(10) .nt-timeline-dot{background-color:var(--nt-color-10)}.nt-timeline-item:nth-child(11) .nt-timeline-dot{background-color:var(--nt-color-11)}.nt-timeline-item:nth-child(12) .nt-timeline-dot{background-color:var(--nt-color-12)}.nt-timeline-item:nth-child(13) .nt-timeline-dot{background-color:var(--nt-color-13)}.nt-timeline-item:nth-child(14) .nt-timeline-dot{background-color:var(--nt-color-14)}.nt-timeline-item:nth-child(15) .nt-timeline-dot{background-color:var(--nt-color-15)}.nt-timeline-item:nth-child(16) .nt-timeline-dot{background-color:var(--nt-color-16)}.nt-timeline-item:nth-child(17) .nt-timeline-dot{background-color:var(--nt-color-17)}.nt-timeline-item:nth-child(18) .nt-timeline-dot{background-color:var(--nt-color-18)}.nt-timeline-item:nth-child(19) .nt-timeline-dot{background-color:var(--nt-color-19)}.nt-timeline-item:nth-child(20) .nt-timeline-dot{background-color:var(--nt-color-20)}:root{--nt-scrollbar-color: #2751b0;--nt-plan-actions-height: 24px;--nt-units-background: #ff9800;--nt-months-background: #2751b0;--nt-plan-vertical-line-color: #a3a3a3ad}.nt-pastello{--nt-scrollbar-color: #9fb8f4;--nt-units-background: #f5dc82;--nt-months-background: #5b7fd1}[data-md-color-scheme=slate]{--nt-units-background: #003773}[data-md-color-scheme=slate] .nt-pastello{--nt-units-background: 
#3f4997}.nt-plan-root{min-height:200px;scrollbar-width:20px;scrollbar-color:var(--nt-scrollbar-color);display:flex}.nt-plan-root ::-webkit-scrollbar{width:20px}.nt-plan-root ::-webkit-scrollbar-track{box-shadow:inset 0 0 5px gray;border-radius:10px}.nt-plan-root ::-webkit-scrollbar-thumb{background:var(--nt-scrollbar-color);border-radius:10px}.nt-plan-root .nt-plan{flex:80%}.nt-plan-root.no-groups .nt-plan-periods{padding-left:0}.nt-plan-root.no-groups .nt-plan-group-summary{display:none}.nt-plan-root .nt-timeline-dot.bigger{top:-10px}.nt-plan-root .nt-timeline-dot.bigger[title]{cursor:help}.nt-plan{white-space:nowrap;overflow-x:auto;display:flex}.nt-plan .ug-timeline-dot{left:368px;top:-8px;cursor:help}.months{display:flex}.month{flex:auto;display:inline-block;box-shadow:rgba(0,0,0,.2) 0px 3px 1px -2px,rgba(0,0,0,.14) 0px 2px 2px 0px,rgba(0,0,0,.12) 0px 1px 5px 0px inset;background-color:var(--nt-months-background);color:#fff;text-transform:uppercase;font-family:Roboto,Helvetica,Arial,sans-serif;padding:2px 5px;font-size:12px;border:1px solid #000;width:150px;border-radius:8px}.nt-plan-group-activities{flex:auto;position:relative}.nt-vline{border-left:1px dashed var(--nt-plan-vertical-line-color);height:100%;left:0;position:absolute;margin-left:-0.5px;top:0;-webkit-transition:all .5s linear !important;-moz-transition:all .5s linear !important;-ms-transition:all .5s linear !important;-o-transition:all .5s linear !important;transition:all .5s linear !important;z-index:-2}.nt-plan-activity{display:flex;margin:2px 0;background-color:rgba(187,187,187,.2509803922)}.actions{height:var(--nt-plan-actions-height)}.actions{position:relative}.period{display:inline-block;height:var(--nt-plan-actions-height);width:120px;position:absolute;left:0px;background:#1da1f2;border-radius:5px;transition:all .5s;cursor:help;-webkit-transition:width 1s ease-in-out;-moz-transition:width 1s ease-in-out;-o-transition:width 1s ease-in-out;transition:width 1s ease-in-out}.period 
.nt-tooltip{display:none;top:30px;position:relative;padding:1rem;text-align:center;font-size:12px}.period:hover .nt-tooltip{display:inline-block}.period-0{left:340px;visibility:visible;background-color:#456165}.period-1{left:40px;visibility:visible;background-color:green}.period-2{left:120px;visibility:visible;background-color:pink;width:80px}.period-3{left:190px;visibility:visible;background-color:darkred;width:150px}.weeks>span,.days>span{height:25px}.weeks>span{display:inline-block;margin:0;padding:0;font-weight:bold}.weeks>span .week-text{font-size:10px;position:absolute;display:inline-block;padding:3px 4px}.days{z-index:-2;position:relative}.day-text{font-size:10px;position:absolute;display:inline-block;padding:3px 4px}.period span{font-size:12px;vertical-align:top;margin-left:4px;color:#000;background:rgba(255,255,255,.6588235294);border-radius:6px;padding:0 4px}.weeks,.days{height:20px;display:flex;box-sizing:content-box}.months{display:flex}.week,.day{height:20px;position:relative;border:1;flex:auto;border:2px solid #fff;border-radius:4px;background-color:var(--nt-units-background);cursor:help}.years{display:flex}.year{text-align:center;border-right:1px solid var(--nt-plan-vertical-line-color);font-weight:bold}.year:first-child{border-left:1px solid var(--nt-plan-vertical-line-color)}.year:first-child:last-child{width:100%}.quarters{display:flex}.quarter{width:12.5%;text-align:center;border-right:1px solid var(--nt-plan-vertical-line-color);font-weight:bold}.quarter:first-child{border-left:1px solid var(--nt-plan-vertical-line-color)}.nt-plan-group{margin:20px 0;position:relative}.nt-plan-group{display:flex}.nt-plan-group-summary{background:#2751b0;width:150px;white-space:normal;padding:.1rem .5rem;border-radius:5px;color:#fff;z-index:3}.nt-plan-group-summary p{margin:0;padding:0;font-size:.6rem;color:#fff}.nt-plan-group-summary,.month,.period,.week,.day,.nt-tooltip{border:3px solid #fff;box-shadow:0 2px 3px -1px rgba(0,0,0,.2),0 3px 3px 0 rgba(0,0,0,.14),0 
1px 5px 0 rgba(0,0,0,.12)}.nt-plan-periods{padding-left:150px}.months{z-index:2;position:relative}.weeks{position:relative;top:-2px;z-index:0}.month,.quarter,.year,.week,.day,.nt-tooltip{font-family:Roboto,Helvetica,Arial,sans-serif;box-sizing:border-box}.nt-cards.nt-grid{display:grid;grid-auto-columns:1fr;gap:.5rem;max-width:100vw;overflow-x:auto;padding:1px}.nt-cards.nt-grid.cols-1{grid-template-columns:repeat(1, 1fr)}.nt-cards.nt-grid.cols-2{grid-template-columns:repeat(2, 1fr)}.nt-cards.nt-grid.cols-3{grid-template-columns:repeat(3, 1fr)}.nt-cards.nt-grid.cols-4{grid-template-columns:repeat(4, 1fr)}.nt-cards.nt-grid.cols-5{grid-template-columns:repeat(5, 1fr)}.nt-cards.nt-grid.cols-6{grid-template-columns:repeat(6, 1fr)}@media only screen and (max-width: 400px){.nt-cards.nt-grid{grid-template-columns:repeat(1, 1fr) !important}}.nt-card{box-shadow:0 2px 2px 0 rgba(0,0,0,.14),0 3px 1px -2px rgba(0,0,0,.2),0 1px 5px 0 rgba(0,0,0,.12)}.nt-card:hover{box-shadow:0 2px 2px 0 rgba(0,0,0,.24),0 3px 1px -2px rgba(0,0,0,.3),0 1px 5px 0 rgba(0,0,0,.22)}[data-md-color-scheme=slate] .nt-card{box-shadow:0 2px 2px 0 rgba(4,40,33,.14),0 3px 1px -2px rgba(40,86,94,.47),0 1px 5px 0 rgba(139,252,255,.64)}[data-md-color-scheme=slate] .nt-card:hover{box-shadow:0 2px 2px 0 rgba(0,255,206,.14),0 3px 1px -2px rgba(33,156,177,.47),0 1px 5px 0 rgba(96,251,255,.64)}.nt-card>a{color:var(--md-default-fg-color)}.nt-card>a>div{cursor:pointer}.nt-card{padding:5px;margin-bottom:.5rem}.nt-card-title{font-size:1rem;font-weight:bold;margin:4px 0 8px 0;line-height:22px}.nt-card-content{padding:.4rem .8rem .8rem .8rem}.nt-card-text{font-size:14px;padding:0;margin:0}.nt-card .nt-card-image{text-align:center;border-radius:2px;background-position:center center;background-size:cover;background-repeat:no-repeat;min-height:120px}.nt-card .nt-card-image.tags img{margin-top:12px}.nt-card .nt-card-image img{height:105px;margin-top:5px}.nt-card a:hover,.nt-card a:focus{color:var(--md-accent-fg-color)}.nt-card 
h2{margin:0}.span-table-wrapper table{border-collapse:collapse;margin-bottom:2rem;border-radius:.1rem}.span-table td,.span-table th{padding:.2rem;background-color:var(--md-default-bg-color);font-size:.64rem;max-width:100%;overflow:auto;touch-action:auto;border-top:.05rem solid var(--md-typeset-table-color);padding:.9375em 1.25em;vertical-align:top}.span-table tr:first-child td{font-weight:700;min-width:5rem;padding:.9375em 1.25em;vertical-align:top}.span-table td:first-child{border-left:.05rem solid var(--md-typeset-table-color)}.span-table td:last-child{border-right:.05rem solid var(--md-typeset-table-color)}.span-table tr:last-child{border-bottom:.05rem solid var(--md-typeset-table-color)}.span-table [colspan],.span-table [rowspan]{font-weight:bold;border:.05rem solid var(--md-typeset-table-color)}.span-table tr:not(:first-child):hover td:not([colspan]):not([rowspan]),.span-table td[colspan]:hover,.span-table td[rowspan]:hover{background-color:rgba(0,0,0,.035);box-shadow:0 .05rem 0 var(--md-default-bg-color) inset;transition:background-color 125ms}.nt-contribs{margin-top:2rem;font-size:small;border-top:1px dotted #d3d3d3;padding-top:.5rem}.nt-contribs .nt-contributors{padding-top:.5rem;display:flex;flex-wrap:wrap}.nt-contribs .nt-contributor{background:#d3d3d3;background-size:cover;width:40px;height:40px;border-radius:100%;margin:0 6px 6px 0;cursor:help;opacity:.7}.nt-contribs .nt-contributor:hover{opacity:1}.nt-contribs .nt-initials{text-transform:uppercase;font-size:24px;text-align:center;width:40px;height:40px;display:inline-block;vertical-align:middle;position:relative;top:2px;color:inherit;font-weight:bold}.nt-contribs .nt-group-0{background-color:var(--nt-color-0)}.nt-contribs .nt-group-1{background-color:var(--nt-color-1)}.nt-contribs .nt-group-2{background-color:var(--nt-color-2)}.nt-contribs .nt-group-3{background-color:var(--nt-color-3)}.nt-contribs .nt-group-4{background-color:var(--nt-color-4)}.nt-contribs 
.nt-group-5{background-color:var(--nt-color-5)}.nt-contribs .nt-group-6{background-color:var(--nt-color-6)}.nt-contribs .nt-group-7{color:#000;background-color:var(--nt-color-7)}.nt-contribs .nt-group-8{color:#000;background-color:var(--nt-color-8)}.nt-contribs .nt-group-9{background-color:var(--nt-color-9)}.nt-contribs .nt-group-10{background-color:var(--nt-color-10)}.nt-contribs .nt-group-11{background-color:var(--nt-color-11)}.nt-contribs .nt-group-12{background-color:var(--nt-color-12)}.nt-contribs .nt-group-13{background-color:var(--nt-color-13)}.nt-contribs .nt-group-14{background-color:var(--nt-color-14)}.nt-contribs .nt-group-15{color:#000;background-color:var(--nt-color-15)}.nt-contribs .nt-group-16{background-color:var(--nt-color-16)}.nt-contribs .nt-group-17{color:#000;background-color:var(--nt-color-17)}.nt-contribs .nt-group-18{background-color:var(--nt-color-18)}.nt-contribs .nt-group-19{background-color:var(--nt-color-19)}.nt-contribs .nt-group-20{color:#000;background-color:var(--nt-color-20)}.nt-contribs .nt-group-21{color:#000;background-color:var(--nt-color-21)}.nt-contribs .nt-group-22{color:#000;background-color:var(--nt-color-22)}.nt-contribs .nt-group-23{color:#000;background-color:var(--nt-color-23)}.nt-contribs .nt-group-24{color:#000;background-color:var(--nt-color-24)}.nt-contribs .nt-group-25{color:#000;background-color:var(--nt-color-25)}.nt-contribs .nt-group-26{color:#000;background-color:var(--nt-color-26)}.nt-contribs .nt-group-27{background-color:var(--nt-color-27)}.nt-contribs .nt-group-28{color:#000;background-color:var(--nt-color-28)}.nt-contribs .nt-group-29{color:#000;background-color:var(--nt-color-29)}.nt-contribs .nt-group-30{background-color:var(--nt-color-30)}.nt-contribs .nt-group-31{background-color:var(--nt-color-31)}.nt-contribs .nt-group-32{color:#000;background-color:var(--nt-color-32)}.nt-contribs .nt-group-33{background-color:var(--nt-color-33)}.nt-contribs 
.nt-group-34{background-color:var(--nt-color-34)}.nt-contribs .nt-group-35{background-color:var(--nt-color-35)}.nt-contribs .nt-group-36{background-color:var(--nt-color-36)}.nt-contribs .nt-group-37{background-color:var(--nt-color-37)}.nt-contribs .nt-group-38{background-color:var(--nt-color-38)}.nt-contribs .nt-group-39{color:#000;background-color:var(--nt-color-39)}.nt-contribs .nt-group-40{color:#000;background-color:var(--nt-color-40)}.nt-contribs .nt-group-41{color:#000;background-color:var(--nt-color-41)}.nt-contribs .nt-group-42{color:#000;background-color:var(--nt-color-42)}.nt-contribs .nt-group-43{color:#000;background-color:var(--nt-color-43)}.nt-contribs .nt-group-44{color:#000;background-color:var(--nt-color-44)}.nt-contribs .nt-group-45{background-color:var(--nt-color-45)}.nt-contribs .nt-group-46{color:#000;background-color:var(--nt-color-46)}.nt-contribs .nt-group-47{background-color:var(--nt-color-47)}.nt-contribs .nt-group-48{background-color:var(--nt-color-48)}.nt-contribs .nt-group-49{background-color:var(--nt-color-49)} \ No newline at end of file diff --git a/docs/examples/img/data_oob.png b/docs/examples/img/data_oob.png new file mode 100644 index 000000000..3e31320a6 --- /dev/null +++ b/docs/examples/img/data_oob.png @@ -0,0 +1,3 @@ +version https://git-lfs.github.com/spec/v1 +oid sha256:efbb4beca235aa2375203db288086c57515ea67333017775e995f47f0cc8bc78 +size 30539 diff --git a/docs/examples/img/influence_functions_example.png b/docs/examples/img/influence_functions_example.png new file mode 100644 index 000000000..c42dd47c9 --- /dev/null +++ b/docs/examples/img/influence_functions_example.png @@ -0,0 +1,3 @@ +version https://git-lfs.github.com/spec/v1 +oid sha256:f10560a9fcc68a1493dd90f788f0f8839ebc52cfc9e59cbc0bfc16312bd6db79 +size 30895 diff --git a/docs/examples/img/influence_imagenet.png b/docs/examples/img/influence_imagenet.png new file mode 100644 index 000000000..d90d9cdf6 --- /dev/null +++ b/docs/examples/img/influence_imagenet.png 
@@ -0,0 +1,3 @@ +version https://git-lfs.github.com/spec/v1 +oid sha256:3553d0079255a94f5cb4b2f83ae8302b733765409715618bf36eb8ee9f0723ad +size 88738 diff --git a/docs/examples/img/influence_sentiment_analysis.png b/docs/examples/img/influence_sentiment_analysis.png new file mode 100644 index 000000000..a9897368d --- /dev/null +++ b/docs/examples/img/influence_sentiment_analysis.png @@ -0,0 +1,3 @@ +version https://git-lfs.github.com/spec/v1 +oid sha256:275dda95990b104f7d3fecf4a6810872fd45295fe5de4982a73d69c3627213d0 +size 13638 diff --git a/docs/examples/img/influence_wine.png b/docs/examples/img/influence_wine.png new file mode 100644 index 000000000..5a871a59e --- /dev/null +++ b/docs/examples/img/influence_wine.png @@ -0,0 +1,3 @@ +version https://git-lfs.github.com/spec/v1 +oid sha256:b805f9502a46433aca3bbc5e23e7b8d3016a6760060ad12c2a06ef075585033e +size 10403 diff --git a/docs/examples/img/least_core_basic.png b/docs/examples/img/least_core_basic.png new file mode 100644 index 000000000..5b7c04eb1 --- /dev/null +++ b/docs/examples/img/least_core_basic.png @@ -0,0 +1,3 @@ +version https://git-lfs.github.com/spec/v1 +oid sha256:7cb2d4b5ec195f2e57ac55393c31d37701243b464fbfcdae3e8c02bbf676f236 +size 31286 diff --git a/docs/examples/img/shapley_basic_spotify.png b/docs/examples/img/shapley_basic_spotify.png new file mode 100644 index 000000000..0e2892bdb --- /dev/null +++ b/docs/examples/img/shapley_basic_spotify.png @@ -0,0 +1,3 @@ +version https://git-lfs.github.com/spec/v1 +oid sha256:697b4b85ffc4e8236fc044e599b0599b35e781a634ccf7c80be3fecf71444ba3 +size 42742 diff --git a/docs/examples/img/shapley_knn_flowers.png b/docs/examples/img/shapley_knn_flowers.png new file mode 100644 index 000000000..7ebcb9f48 --- /dev/null +++ b/docs/examples/img/shapley_knn_flowers.png @@ -0,0 +1,3 @@ +version https://git-lfs.github.com/spec/v1 +oid sha256:26221247423d6be29506d2120f0e6fcf8b2c7cb4005658a61b5b3b1f1bd1565b +size 38583 diff --git 
a/docs/examples/img/shapley_utility_learning.png b/docs/examples/img/shapley_utility_learning.png new file mode 100644 index 000000000..3417be0a2 --- /dev/null +++ b/docs/examples/img/shapley_utility_learning.png @@ -0,0 +1,3 @@ +version https://git-lfs.github.com/spec/v1 +oid sha256:fd7db4622253674594fc9588525e60633bbacf73b60287f155f9e7b00605bcce +size 12820 diff --git a/docs/examples/index.md b/docs/examples/index.md new file mode 100644 index 000000000..3ab36c0a2 --- /dev/null +++ b/docs/examples/index.md @@ -0,0 +1,96 @@ +--- +title: Examples +alias: + name: examples + text: Example gallery +--- + +## Data valuation + +

+ +- [__Shapley values__](shapley_basic_spotify/) + + --- + + An introduction using the spotify dataset, showcasing grouped datasets and + applied to improving model performance and identifying bogus data. + + [![](img/shapley_basic_spotify.png)](shapley_basic_spotify/) + +- [__KNN Shapley__](shapley_knn_flowers/) + + --- + + A showcase of a fast model-specific valuation method using the iris dataset. + + [![](img/shapley_knn_flowers.png)](shapley_knn_flowers/) + +- [__Data utility learning__](shapley_utility_learning/) + + --- + + Learning a utility function from a few evaluations and using it to estimate + the value of the remaining data. + + [![](img/shapley_utility_learning.png)](shapley_utility_learning/) + +- [__Least Core__](least_core_basic/) + + --- + + An alternative solution concept from game theory, illustrated on a + classification problem. + + [![](img/least_core_basic.png)](least_core_basic/) + +- [__Data OOB__](data_oob/) + + --- + + A different and fast strategy for data valuation, using the out-of-bag + error of a bagging model. + + [![](img/data_oob.png)](data_oob/) + +
+ + +## Influence functions + +
+ +- [__For CNNs__](influence_imagenet) + + --- + + Detecting corrupted labels with influence functions on the ImageNet dataset. + + [![](img/influence_imagenet.png)](influence_imagenet/) + +- [__For language models__](influence_sentiment_analysis/) + + --- + + Using the IMDB dataset for sentiment analysis and a fine-tuned BERT model. + + [![](img/influence_sentiment_analysis.png)](influence_sentiment_analysis/) + +- [__For mislabeled data__](influence_synthetic/) + + --- + + Detecting corrupted labels using a synthetic dataset. + + [![](img/influence_functions_example.png)](influence_synthetic/) + +- [__For outlier detection__](influence_wine/) + + --- + + Using the wine dataset + + [![](img/influence_wine.png)](influence_wine/) + + +
diff --git a/docs/getting-started/advanced-usage.md b/docs/getting-started/advanced-usage.md new file mode 100644 index 000000000..17bdf674f --- /dev/null +++ b/docs/getting-started/advanced-usage.md @@ -0,0 +1,190 @@ +--- +title: Advanced usage +alias: + name: advanced-usage + text: Advanced usage +--- + +# Advanced usage + +Besides the dos and don'ts of data valuation itself, which are the subject of +the examples and the documentation of each method, there are two main things to +keep in mind when using pyDVL namely Parallelization and Caching. + +## Parallelization { #setting-up-parallelization } + +pyDVL uses parallelization to scale and speed up computations. It does so +using one of Dask, Ray or Joblib. The first is used in +the [influence][pydvl.influence] package whereas the other two +are used in the [value][pydvl.value] package. + +### Data valuation + +For data valuation, pyDVL uses [joblib](https://joblib.readthedocs.io/en/latest/) for local +parallelization (within one machine) and supports using +[Ray](https://ray.io) for distributed parallelization (across multiple machines). + +The former works out of the box but for the latter you will need to install +additional dependencies (see [Extras][installation-extras]) +and to provide a running cluster (or run ray in local mode). + +!!! info + + As of v0.9.0 pyDVL does not allow requesting resources per task sent to the + cluster, so you will need to make sure that each worker has enough resources to + handle the tasks it receives. A data valuation task using game-theoretic methods + will typically make a copy of the whole model and dataset to each worker, even + if the re-training only happens on a subset of the data. This means that you + should make sure that each worker has enough memory to handle the whole dataset. 
+ +#### Joblib + +Please follow the instructions in Joblib's documentation +for all possible configuration options that you can pass to the +[parallel_config][joblib.parallel_config] context manager. + +To use the joblib parallel backend with the `loky` backend and verbosity set to `100` +to compute exact shapley values you would use: + +```python +import joblib +from pydvl.parallel import ParallelConfig +from pydvl.value.shapley import combinatorial_exact_shapley +from pydvl.utils.utility import Utility + +config = ParallelConfig(backend="joblib") +u = Utility(...) + +with joblib.parallel_config(backend="loky", verbose=100): + combinatorial_exact_shapley(u, config=config) +``` + +#### Ray + +Please follow the instructions in Ray's documentation to +[set up a remote cluster](https://docs.ray.io/en/latest/cluster/key-concepts.html). +You could alternatively use a local cluster and in that case you don't have to set +anything up. + +Before starting a computation, you should initialize ray by calling +[`ray.init`][ray.init] with the appropriate parameters: + +To set up and start a local ray cluster with 4 CPUs you would use: + +```python +import ray + +ray.init(num_cpus=4) +``` + +Whereas for a remote ray cluster you would use: + +```python +import ray + +address = "" +ray.init(address) +``` + +To use the ray parallel backend to compute exact shapley values you would use: + +```python +import ray +from pydvl.parallel import ParallelConfig +from pydvl.value.shapley import combinatorial_exact_shapley +from pydvl.utils.utility import Utility + +ray.init() +config = ParallelConfig(backend="ray") +u = Utility(...) +combinatorial_exact_shapley(u, config=config) +``` + +### Influence functions + +Refer to [Scaling influence computation][scaling-influence-computation] for +explanations about parallelization for Influence Functions. 
+ +## Caching { #getting-started-cache } + +PyDVL can cache (memoize) the computation of the utility function +and speed up some computations for data valuation. +It is however disabled by default. +When it is enabled it takes into account the data indices passed as argument +and the utility function wrapped into the +[Utility][pydvl.utils.utility.Utility] object. This means that +care must be taken when reusing the same utility function with different data, +see the documentation for the [caching package][pydvl.utils.caching] for more +information. + +In general, caching won't play a major role in the computation of Shapley values +because the probability of sampling the same subset twice, and hence needing +the same utility function computation, is very low. However, it can be very +useful when comparing methods that use the same utility function, or when +running multiple experiments with the same data. + +pyDVL supports 3 different caching backends: + +- [InMemoryCacheBackend][pydvl.utils.caching.memory.InMemoryCacheBackend]: + an in-memory cache backend that uses a dictionary to store and retrieve + cached values. This is used to share cached values between threads + in a single process. + +- [DiskCacheBackend][pydvl.utils.caching.disk.DiskCacheBackend]: + a disk-based cache backend that uses pickled values written to and read from disk. + This is used to share cached values between processes in a single machine. +- [MemcachedCacheBackend][pydvl.utils.caching.memcached.MemcachedCacheBackend]: + a [Memcached](https://memcached.org/)-based cache backend that uses pickled values written to + and read from a Memcached server. This is used to share cached values + between processes across multiple machines. + + ??? info "Memcached extras" + + The Memcached backend requires optional dependencies. + See [Extras][installation-extras] for more information. 
+ +As an example, here's how one would use the disk-based cached backend +with a utility: + +```python +from pydvl.utils.caching.disk import DiskCacheBackend +from pydvl.utils.utility import Utility + +cache_backend = DiskCacheBackend() +u = Utility(..., cache_backend=cache_backend) +``` + +Please refer to the documentation and examples of each backend class for more details. + +!!! tip "When is the cache really necessary?" + Crucially, semi-value computations with the + [PermutationSampler][pydvl.value.sampler.PermutationSampler] require caching + to be enabled, or they will take twice as long as the direct implementation + in [compute_shapley_values][pydvl.value.shapley.compute_shapley_values]. + +!!! tip "Using the cache" + Continue reading about the cache in the documentation + for the [caching package][pydvl.utils.caching]. + +### Setting up the Memcached cache { #setting-up-memcached } + +[Memcached](https://memcached.org/) is an in-memory key-value store accessible +over the network. pyDVL can use it to cache the computation of the utility function +and speed up some computations (in particular, semi-value computations with the +[PermutationSampler][pydvl.value.sampler.PermutationSampler] but other methods +may benefit as well). + +You can either install it as a package or run it inside a docker container (the +simplest). For installation instructions, refer to the [Getting +started](https://github.com/memcached/memcached/wiki#getting-started) section in +memcached's wiki. 
Then you can run it with: + +```shell +memcached -u user +``` + +To run memcached inside a container in daemon mode instead, use: + +```shell +docker container run -d --rm -p 11211:11211 memcached:latest +``` diff --git a/docs/getting-started/applications.md b/docs/getting-started/applications.md new file mode 100644 index 000000000..87c2e0fcf --- /dev/null +++ b/docs/getting-started/applications.md @@ -0,0 +1,80 @@ +--- +title: Applications of data valuation +alias: + name: data-valuation-applications +--- + +# Applications of data valuation + +Data valuation methods can improve various aspects of data engineering and +machine learning workflows. When applied judiciously, these methods can enhance +data quality, model performance, and cost-effectiveness. + +However, the results can be inconsistent. Values have a strong dependency +on the training procedure and the performance metric used. For instance, +accuracy is a poor metric for imbalanced sets and this has a stark effect +on data values. Some models exhibit great variance in some regimes +and this again has a detrimental effect on values. See +[[data-valuation#problems-of-data-values|Problems of data values]] for more on +this. + +Here we quickly enumerate the most common uses of data valuation. For a +comprehensive overview, along with concrete examples, please refer to the +[Transferlab blog post]({{ transferlab.website }}blog/data-valuation-applications/) +on this topic. + +## Data engineering + +Some of the promising applications in data engineering include: + +- Removing low-value data points to increase model performance. +- Pruning redundant samples enables more efficient training of large models. +- Active learning. Points predicted to have high-value points can be prioritized + for labeling, reducing the cost of data collection. +- Analyzing high- and low-value data to guide data collection and improve + upstream data processes. Low-value points may reveal data issues to address. 
+- Identify irrelevant or duplicated data when evaluating offerings from data + providers. + +## Model development + +Some of the useful applications include: + +- Data attribution for interpretation and debugging: Analyzing the most or least + valuable samples for a class can reveal cases where the model relies on + confounding features instead of true signal. Investigating influential points + for misclassified examples highlights limitations to address. +- Sensitivity / robustness analysis: [@broderick_automatic_2021] shows that + removing a small fraction of highly influential data can completely flip model + conclusions. This can reveal potential issues with the modeling approach, data + collection process, or intrinsic difficulties of the problem that require + further inspection. +- Continual learning: in order to avoid forgetting when training on new data, + a subset of previously seen data is presented again. Data valuation can help + in the selection of the most valuable samples to retain. + +## Attacks + +Data valuation techniques have applications in detecting data manipulation and +contamination, although the feasibility of such attacks is limited. + +- Watermark removal: Points with low value on a correct validation set may be + part of a watermarking mechanism. +- Poisoning attacks: Influential points can be shifted to induce large changes + in model estimators. + + +## Data markets + +Additionally, one of the motivating applications for the whole field is that of +data markets, where data valuation can be the key component to determine the +price of data. + +Game-theoretic valuation methods like Shapley values can help assign fair prices, +but have limitations around handling duplicates or adversarial data. +Model-free methods like LAVA [@just_lava_2023] and CRAIG are +particularly well suited for this, as they use the Wasserstein distance between +a vendor's data and the buyer's to determine the value of the former. 
+ +However, this is a complex problem which can face practical obstacles as simple as +data owners being unwilling to disclose their data for valuation, even to a broker. diff --git a/docs/getting-started/benchmarking.md b/docs/getting-started/benchmarking.md new file mode 100644 index 000000000..e6e1b53a5 --- /dev/null +++ b/docs/getting-started/benchmarking.md @@ -0,0 +1,96 @@ +--- +title: Benchmarking valuation methods +--- + +# Benchmarking tasks + +Because the magnitudes of values or influences from different algorithms, or +datasets, are not comparable to each other, evaluation of the methods is +typically done with _downstream tasks_. + +## Benchmarking valuation methods + +Data valuation is particularly useful for data selection, pruning and +inspection in general. For this reason, the most common benchmarks are +**data removal** and **noisy label detection**. + +### High-value point removal + +After computing the values for all data in $T = \{ \mathbf{z}_i : i = 1, +\ldots, n \}$, the set is sorted by decreasing value. We denote by $T_{[i :]}$ +the sorted sequence of points $(\mathbf{z}_i, \mathbf{z}_{i + 1}, +\ldots, \mathbf{z}_n)$ for $1 \leqslant i \leqslant n$. Now train +successively $f_{T_{[i :]}}$ and compute its accuracy $a_{T_{[i :]}} +(D_{\operatorname{test}})$ on the held-out test set, then plot all numbers. By +using $D_{\operatorname{test}}$ one approximates the expected accuracy drop on +unseen data. Because the points removed have a high value, one expects +performance to drop visibly w.r.t. a random baseline. + +### Low-value point removal + +The complementary experiment removes data in increasing order, with the +lowest-valued points first. Here one expects performance to increase relative to +randomly removing points before training. Additionally, every real dataset will +include slightly out-of-distribution points, so one should also expect an +absolute increase in performance when some of the lowest-valued points are +removed. 
+ +### Value transfer + +This experiment explores the extent to which data values computed with one +(cheap) model can be transferred to another (potentially more complex) one. +Different classifiers are used as a source to calculate data values. These +values are then used in the point removal tasks described above, but using a +different (target) model for evaluation of the accuracies $a_{T_{[i :]}}$. A +multi-layer perceptron is added for evaluation as well. + +### Noisy label detection + +This experiment tests the ability of a method to detect mislabeled instances in +the data. A fixed fraction $\alpha$ of the training data are picked at random +and their labels flipped. Data values are computed, then the $\alpha$-fraction +of lowest-valued points are selected, and the overlap with the subset of flipped +points is computed. This synthetic experiment is however hard to put into +practical use, since the fraction $\alpha$ is of course unknown in practice. + +### Rank stability + +Introduced in [@wang_data_2022], one can look at how stable the top $k$% of +the values is across runs. Rank stability of a method is necessary but not +sufficient for good results. Ideally one wants to identify high-value points +reliably (good precision and recall) and consistently (good rank stability). + +## Benchmarking Influence function methods + +!!! Todo + This section is basically a stub + +Although in principle one can compute the average influence over the test set +and run the same tasks as above, because influences are computed for each pair +of training and test sample, they typically require different experiments to +compare their efficacy. + +### Approximation quality + +The biggest difficulty when computing influences is the approximation of the +inverse Hessian-vector product. For this reason one often sees in the literature +the quality of the approximation to LOO as an indicator of performance, the +exact Influence Function being a first-order approximation to it. 
However, as +shown by [@bae_if_2022], the different approximation errors ensuing from the lack of +convexity, approximate Hessian-vector products and so on, lead to this being a +poor benchmark overall. + +### Data re-labelling + +[@kong_resolving_2022] introduce a method using IFs to re-label harmful training +samples in order to improve accuracy. One can then take the obtained improvement +as a measure of the quality of the IF method. + +### Post-hoc fairness adjustment + +Introduced in [@...], the idea is to compute influences over a carefully +selected fair set, and to use them to re-weight the training data. + + + + diff --git a/docs/getting-started/first-steps.md b/docs/getting-started/first-steps.md index 403724362..01a8b792b 100644 --- a/docs/getting-started/first-steps.md +++ b/docs/getting-started/first-steps.md @@ -1,14 +1,14 @@ --- -title: Getting Started +title: First steps alias: - name: getting-started - text: Getting Started + name: first-steps + text: First Steps --- -# Getting started +# First steps !!! Warning - Make sure you have read [[installation]] before using the library. + Make sure you have read [[getting-started#installation]] before using the library. In particular read about which extra dependencies you may need. ## Main concepts @@ -20,14 +20,14 @@ should be enough to get you started. * [[data-valuation]] for key objects and usage patterns for Shapley value computation and related methods. -* [[influence-values]] for instructions on how to compute influence functions. +* [[influence-function]] for instructions on how to compute influence functions. ## Running the examples If you are somewhat familiar with the concepts of data valuation, you can start by browsing our worked-out examples illustrating pyDVL's capabilities either: -- In the examples under [[data-valuation]] and [[influence-values]]. +- In the examples under [[data-valuation]] and [[influence-function]]. 
- Using [binder](https://mybinder.org/) notebooks, deployed from each example's page. - Locally, by starting a jupyter server at the root of the project. You will @@ -36,104 +36,5 @@ by browsing our worked-out examples illustrating pyDVL's capabilities either: ## Advanced usage -Besides the dos and don'ts of data valuation itself, which are the subject of -the examples and the documentation of each method, there are two main things to -keep in mind when using pyDVL. - -### Caching - -PyDVL can cache (memoize) the computation of the utility function -and speed up some computations for data valuation. -It is however disabled by default. -When it is enabled it takes into account the data indices passed as argument -and the utility function wrapped into the -[Utility][pydvl.utils.utility.Utility] object. This means that -care must be taken when reusing the same utility function with different data, -see the documentation for the [caching package][pydvl.utils.caching] for more -information. - -In general, caching won't play a major role in the computation of Shapley values -because the probability of sampling the same subset twice, and hence needing -the same utility function computation, is very low. However, it can be very -useful when comparing methods that use the same utility function, or when -running multiple experiments with the same data. - -pyDVL supports 3 different caching backends: - -- [InMemoryCacheBackend][pydvl.utils.caching.memory.InMemoryCacheBackend]: - an in-memory cache backend that uses a dictionary to store and retrieve - cached values. This is used to share cached values between threads - in a single process. -- [DiskCacheBackend][pydvl.utils.caching.disk.DiskCacheBackend]: - a disk-based cache backend that uses pickled values written to and read from disk. - This is used to share cached values between processes in a single machine. 
-- [MemcachedCacheBackend][pydvl.utils.caching.memcached.MemcachedCacheBackend]: - a [Memcached](https://memcached.org/)-based cache backend that uses pickled values written to - and read from a Memcached server. This is used to share cached values - between processes across multiple machines. - - **Note** This specific backend requires optional dependencies. - See [[installation#extras]] for more information) - -!!! tip "When is the cache really necessary?" - Crucially, semi-value computations with the - [PermutationSampler][pydvl.value.sampler.PermutationSampler] require caching - to be enabled, or they will take twice as long as the direct implementation - in [compute_shapley_values][pydvl.value.shapley.compute_shapley_values]. - -!!! tip "Using the cache" - Continue reading about the cache in the documentation - for the [caching package][pydvl.utils.caching]. - -#### Setting up the Memcached cache - -[Memcached](https://memcached.org/) is an in-memory key-value store accessible -over the network. pyDVL can use it to cache the computation of the utility function -and speed up some computations (in particular, semi-value computations with the -[PermutationSampler][pydvl.value.sampler.PermutationSampler] but other methods -may benefit as well). - -You can either install it as a package or run it inside a docker container (the -simplest). For installation instructions, refer to the [Getting -started](https://github.com/memcached/memcached/wiki#getting-started) section in -memcached's wiki. Then you can run it with: - -```shell -memcached -u user -``` - -To run memcached inside a container in daemon mode instead, use: - -```shell -docker container run -d --rm -p 11211:11211 memcached:latest -``` - -### Parallelization - -pyDVL uses [joblib](https://joblib.readthedocs.io/en/latest/) for local -parallelization (within one machine) and supports using -[Ray](https://ray.io) for distributed parallelization (across multiple machines). 
- -The former works out of the box but for the latter you will need to install -additional dependencies (see [[installation#extras]] ) -and to provide a running cluster (or run ray in local mode). - -As of v0.7.0 pyDVL does not allow requesting resources per task sent to the -cluster, so you will need to make sure that each worker has enough resources to -handle the tasks it receives. A data valuation task using game-theoretic methods -will typically make a copy of the whole model and dataset to each worker, even -if the re-training only happens on a subset of the data. This means that you -should make sure that each worker has enough memory to handle the whole dataset. - -#### Ray - -Please follow the instructions in Ray's documentation to set up a cluster. -Once you have a running cluster, you can use it by passing the address -of the head node to parallel methods via [ParallelConfig][pydvl.parallel.config.ParallelConfig]. - -For a local ray cluster you would use: - -```python -from pydvl.parallel.config import ParallelConfig -config = ParallelConfig(backend="ray") -``` +Refer to the [[advanced-usage]] page for explanations on how to enable +and use parallelization and caching. diff --git a/docs/getting-started/glossary.md b/docs/getting-started/glossary.md new file mode 100644 index 000000000..56ce7786f --- /dev/null +++ b/docs/getting-started/glossary.md @@ -0,0 +1,280 @@ +# Glossary + +This glossary is meant to provide only brief explanations of each term, helping +to clarify the concepts and techniques used in the library. For more detailed +information, please refer to the relevant literature or resources. + +!!! warning + This glossary is still a work in progress. Pull requests are welcome! + +Terms in data valuation and influence functions: + +### Arnoldi Method + +The Arnoldi method approximately computes eigenvalue, eigenvector pairs of +a symmetric matrix. For influence functions, it is used to approximate +the [iHVP][inverse-hessian-vector-product]. 
+Introduced by [@schioppa_scaling_2022] in the context of influence functions. + + * [Implementation (torch) + ][pydvl.influence.torch.influence_function_model.ArnoldiInfluence] + * [Documentation (torch)][arnoldi] + +### Block Conjugate Gradient + +A blocked version of [CG][conjugate-gradient], which solves several linear +systems simultaneously. For Influence Functions, it is used to +approximate the [iHVP][inverse-hessian-vector-product]. + + * [Implementation (torch) + ][pydvl.influence.torch.influence_function_model.CgInfluence] + * [Documentation (torch)][cg] + +### Class-wise Shapley + +Class-wise Shapley is a Shapley valuation method which introduces a utility +function that balances in-class, and out-of-class accuracy, with the goal of +favoring points that improve the model's performance on the class they belong +to. It is estimated to be particularly useful in imbalanced datasets, but more +research is needed to confirm this. +Introduced by [@schoch_csshapley_2022]. + + * [Implementation + ][pydvl.value.shapley.classwise.compute_classwise_shapley_values] + * [Documentation][class-wise-shapley] + +### Conjugate Gradient + +CG is an algorithm for solving linear systems with a symmetric and +positive-definite coefficient matrix. For Influence Functions, it is used to +approximate the [iHVP][inverse-hessian-vector-product]. + + * [Implementation (torch) +][pydvl.influence.torch.influence_function_model.CgInfluence] + * [Documentation (torch)][cg] + +### Data Utility Learning + +Data Utility Learning is a method that uses an ML model to learn the utility +function. Essentially, it learns to predict the performance of a model when +trained on a given set of indices from the dataset. The cost of training this +model is quickly amortized by avoiding costly re-evaluations of the original +utility. +Introduced by [@wang_improving_2022]. 
+ + * [Implementation][pydvl.utils.utility.DataUtilityLearning] + * [Documentation][creating-a-utility] + +### Eigenvalue-corrected Kronecker-Factored Approximate Curvature + +EKFAC builds on [K-FAC][kronecker-factored-approximate-curvature] by correcting +for the approximation errors in the eigenvalues of the blocks of the +Kronecker-factored approximate curvature matrix. This correction aims to refine +the accuracy of natural gradient approximations, thus potentially offering +better training efficiency and stability in neural networks. + + * [Implementation (torch) + ][pydvl.influence.torch.influence_function_model.EkfacInfluence] + * [Documentation (torch)][eigenvalue-corrected-k-fac] + + +### Group Testing + +Group Testing is a strategy for identifying characteristics within groups of +items efficiently, by testing groups rather than individuals to quickly narrow +down the search for items with specific properties. +Introduced into data valuation by [@jia_efficient_2019a]. + + * [Implementation][pydvl.value.shapley.gt.group_testing_shapley] + * [Documentation][group-testing] + +### Influence Function + +The Influence Function measures the impact of a single data point on a +statistical estimator. In machine learning, it's used to understand how much a +particular data point affects the model's prediction. +Introduced into data valuation by [@koh_understanding_2017]. + + * [[influence-function|Documentation]] + +### Inverse Hessian-vector product + +iHVP is the operation of calculating the product of the inverse Hessian matrix +of a function and a vector, without explicitly constructing nor inverting the +full Hessian matrix first. This is essential for influence function computation. + +### Kronecker-Factored Approximate Curvature + +K-FAC is an optimization technique that approximates the Fisher Information +matrix's inverse efficiently. 
It uses the Kronecker product to factor the +matrix, significantly speeding up the computation of natural gradient updates +and potentially improving training efficiency. + +### Least Core + +The Least Core is a solution concept in cooperative game theory, referring to +the smallest set of payoffs to players that cannot be improved upon by any +coalition, ensuring stability in the allocation of value. In data valuation, +it implies solving a linear and a quadratic system whose constraints are +determined by the evaluations of the utility function on every subset of the +training data. +Introduced as data valuation method by [@yan_if_2021]. + + * [Implementation][pydvl.value.least_core.compute_least_core_values] + * [Documentation][least-core-values] + +### Linear-time Stochastic Second-order Algorithm + +LiSSA is an efficient algorithm for approximating the inverse Hessian-vector +product, enabling faster computations in large-scale machine learning +problems, particularly for second-order optimization. +For Influence Functions, it is used to +approximate the [iHVP][inverse-hessian-vector-product]. +Introduced by [@agarwal_secondorder_2017]. + + * [Implementation (torch) + ][pydvl.influence.torch.influence_function_model.LissaInfluence] + * [Documentation (torch) + ][linear-time-stochastic-second-order-approximation-lissa] + +### Leave-One-Out + +LOO in the context of data valuation refers to the process of evaluating the +impact of removing individual data points on the model's performance. The +value of a training point is defined as the marginal change in the model's +performance when that point is removed from the training set. + + * [Implementation][pydvl.value.loo.loo.compute_loo] + * [Documentation][leave-one-out-values] + +### Monte Carlo Least Core + +MCLC is a variation of the Least Core that uses a reduced amount of +constraints, sampled randomly from the powerset of the training data. +Introduced by [@yan_if_2021]. 
+ + * [Implementation][pydvl.value.least_core.compute_least_core_values] + * [Documentation][monte-carlo-least-core] + +### Monte Carlo Shapley + +MCS estimates the Shapley Value using a Monte Carlo approximation to the sum +over subsets of the training set. This reduces computation to polynomial time +at the cost of accuracy, but this loss is typically irrelevant for downstream +applications in ML. +Introduced into data valuation by [@ghorbani_data_2019]. + + * [Implementation][pydvl.value.shapley.montecarlo] + * [Documentation][monte-carlo-combinatorial-shapley] + +### Nyström Low-Rank Approximation + +The Nyström approximation computes a low-rank approximation to a symmetric +positive-definite matrix via random projections. For influence functions, +it is used to approximate the [iHVP][inverse-hessian-vector-product]. +Introduced as sketch and solve algorithm in [@hataya_nystrom_2023], and as +preconditioner for [PCG][preconditioned-conjugate-gradient] in +[@frangella_randomized_2023]. + + * [Implementation Sketch-and-Solve (torch) + ][pydvl.influence.torch.influence_function_model.NystroemSketchInfluence] + * [Documentation Sketch-and-Solve (torch)][nystrom-sketch-and-solve] + * [Implementation Preconditioner (torch) + ][pydvl.influence.torch.pre_conditioner.NystroemPreConditioner] + +### Point removal task + +A task in data valuation where the quality of a valuation method is measured +through the impact of incrementally removing data points on the model's +performance, where the points are removed in order of their value. See + + * [Benchmarking tasks][benchmarking-tasks] + +### Preconditioned Block Conjugate Gradient + +A blocked version of [PCG][preconditioned-conjugate-gradient], which solves +several linear systems simultaneously. For Influence Functions, it is used to +approximate the [iHVP][inverse-hessian-vector-product]. 
+ + * [Implementation CG (torch) + ][pydvl.influence.torch.influence_function_model.CgInfluence] + * [Implementation Preconditioner (torch) + ][pydvl.influence.torch.pre_conditioner] + * [Documentation (torch)][cg] + +### Preconditioned Conjugate Gradient + +A preconditioned version of [CG][conjugate-gradient] for improved +convergence, depending on the characteristics of the matrix and the +preconditioner. For Influence Functions, it is used to +approximate the [iHVP][inverse-hessian-vector-product]. + + * [Implementation CG (torch) + ][pydvl.influence.torch.influence_function_model.CgInfluence] + * [Implementation Preconditioner (torch) + ][pydvl.influence.torch.pre_conditioner] + * [Documentation (torch)][cg] + +### Shapley Value + +Shapley Value is a concept from cooperative game theory that allocates payouts +to players based on their contribution to the total payoff. In data valuation, +players are data points. The method assigns a value to each data point based +on a weighted average of its marginal contributions to the model's performance +when trained on each subset of the training set. This requires +$\mathcal{O}(2^{n-1})$ re-trainings of the model, which is infeasible for even +trivial data set sizes, so one resorts to approximations like TMCS. +Introduced into data valuation by [@ghorbani_data_2019]. + + * [Implementation][pydvl.value.shapley.naive] + * [Documentation][shapley-value] + +### Truncated Monte Carlo Shapley + +TMCS is an efficient approach to estimating the Shapley Value using a +truncated version of the Monte Carlo method, reducing computation time while +maintaining accuracy in large datasets. +Introduced by [@ghorbani_data_2019]. 
+ + * [Implementation + ][pydvl.value.shapley.montecarlo.permutation_montecarlo_shapley] + * [Documentation][permutation-shapley] + +### Weighted Accuracy Drop + +WAD is a metric to evaluate the impact of sequentially removing data points on +the performance of a machine learning model, weighted by their rank, i.e. by the +time at which they were removed. +Introduced by [@schoch_csshapley_2022]. + +--- + +## Other terms + +### Coefficient of Variation + +CV is a statistical measure of the dispersion of data points in a data series +around the mean, expressed as a percentage. It's used to compare the degree of +variation from one data series to another, even if the means are drastically +different. + + +### Constraint Satisfaction Problem + +A CSP involves finding values for variables within specified constraints or +conditions, commonly used in scheduling, planning, and design problems where +solutions must satisfy a set of restrictions. + +### Out-of-Bag + +OOB refers to data samples in an ensemble learning context (like random forests) +that are not selected for training a specific model within the ensemble. These +OOB samples are used as a validation set to estimate the model's accuracy, +providing a convenient internal cross-validation mechanism. + +### Machine Learning Reproducibility Challenge + +The [MLRC](https://reproml.org/) is an initiative that encourages the +verification and replication of machine learning research findings, promoting +transparency and reliability in the field. Papers are published in +[Transactions on Machine Learning Research](https://jmlr.org/tmlr/) (TMLR). 
diff --git a/docs/getting-started/index.md b/docs/getting-started/index.md
new file mode 100644
index 000000000..da4b2c1eb
--- /dev/null
+++ b/docs/getting-started/index.md
@@ -0,0 +1,107 @@
+---
+title: Getting started
+alias:
+  name: getting-started
+  title: Getting started
+---
+
+# Getting started
+
+If you want to jump straight in, [install pyDVL](#installation)
+and then check out [[examples|the examples]]. You will probably want to install
+with support for [influence function computation](#installation-influences).
+
+We have introductions to the ideas behind [[data-valuation|Data valuation]] and
+[[influence-function|Influence functions]], as well as a short overview of
+[common applications](applications).
+
+
+## Installing pyDVL { #installation }
+
+To install the latest release use:
+
+```shell
+pip install pyDVL
+```
+
+See [Extras][installation-extras] for optional dependencies, in particular if
+you are interested in influence functions. You can also install the latest
+development version from [TestPyPI](https://test.pypi.org/project/pyDVL/):
+
+```shell
+pip install pyDVL --index-url https://test.pypi.org/simple/
+```
+
+In order to check the installation you can use:
+
+```shell
+python -c "import pydvl; print(pydvl.__version__)"
+```
+
+## Dependencies
+
+pyDVL requires Python >= 3.8, [numpy](https://numpy.org/),
+[scikit-learn](https://scikit-learn.org/stable/), [scipy](https://scipy.org/),
+[cvxpy](https://www.cvxpy.org/) for the core methods, and
+[joblib](https://joblib.readthedocs.io/en/stable/) for parallelization locally.
+Additionally, the [Influence functions][pydvl.influence] module requires PyTorch
+(see [Extras][installation-extras] below). 
+ + +## Extras { #installation-extras } + +pyDVL has a few [extra](https://peps.python.org/pep-0508/#extras) dependencies +that can be optionally installed: + +### Influence functions { #installation-influences } + +To use the module on influence functions, [pydvl.influence][pydvl.influence], +run: + +```shell +pip install pyDVL[influence] +``` + +This includes a dependency on [PyTorch](https://pytorch.org/) (Version 2.0 and +above) and thus is left out by default. + +### CuPy + +In case that you have a supported version of CUDA installed (v11.2 to 11.8 as of +this writing), you can enable eigenvalue computations for low-rank approximations +with [CuPy](https://docs.cupy.dev/en/stable/index.html) on the GPU by using: + +```shell +pip install pyDVL[cupy] +``` + +This installs [cupy-cuda11x](https://pypi.org/project/cupy-cuda11x/). + +If you use a different version of CUDA, please install CuPy +[manually](https://docs.cupy.dev/en/stable/install.html). + +### Ray + +If you want to use [Ray](https://www.ray.io/) to distribute data valuation +workloads across nodes in a cluster (it can be used locally as well, but for +this we recommend joblib instead) install pyDVL using: + +```shell +pip install pyDVL[ray] +``` + +see [the intro to parallelization][setting-up-parallelization] for more +details on how to use it. + +### Memcached + +If you want to use [Memcached](https://memcached.org/) for caching utility +evaluations, use: + +```shell +pip install pyDVL[memcached] +``` + +This installs [pymemcache](https://github.com/pinterest/pymemcache) +additionally. Be aware that you still have to start a memcached server manually. +See [Setting up the Memcached cache][setting-up-memcached]. 
diff --git a/docs/getting-started/installation.md b/docs/getting-started/installation.md deleted file mode 100644 index 125f81d13..000000000 --- a/docs/getting-started/installation.md +++ /dev/null @@ -1,90 +0,0 @@ ---- -title: Installing pyDVL -alias: - name: installation - text: Installing pyDVL ---- - -# Installing pyDVL - -To install the latest release use: - -```shell -pip install pyDVL -``` - -You can also install the latest development version from -[TestPyPI](https://test.pypi.org/project/pyDVL/): - -```shell -pip install pyDVL --index-url https://test.pypi.org/simple/ -``` - -In order to check the installation you can use: - -```shell -python -c "import pydvl; print(pydvl.__version__)" -``` - -## Dependencies - -pyDVL requires Python >= 3.8, [numpy](https://numpy.org/), -[scikit-learn](https://scikit-learn.org/stable/), [scipy](https://scipy.org/), -[cvxpy](https://www.cvxpy.org/) for the Core methods, -and [joblib](https://joblib.readthedocs.io/en/stable/) -for parallelization locally. Additionally,the [Influence functions][pydvl.influence] -module requires PyTorch (see [[installation#extras]]). - -### Extras - -pyDVL has a few [extra](https://peps.python.org/pep-0508/#extras) dependencies -that can be optionally installed: - -- `influence`: - - To use all features of influence functions use instead: - - ```shell - pip install pyDVL[influence] - ``` - - This includes a dependency on [PyTorch](https://pytorch.org/) (Version 2.0 and - above) and thus is left out by default. - -- `cupy`: - - In case that you have a supported version of CUDA installed (v11.2 to 11.8 as of - this writing), you can enable eigenvalue computations for low-rank approximations - with [CuPy](https://docs.cupy.dev/en/stable/index.html) on the GPU by using: - - ```shell - pip install pyDVL[cupy] - ``` - - This installs [cupy-cuda11x](https://pypi.org/project/cupy-cuda11x/). 
- - If you use a different version of CUDA, please install CuPy - [manually](https://docs.cupy.dev/en/stable/install.html). - -- `ray`: - - If you want to use [Ray](https://www.ray.io/) to distribute data valuation - workloads across nodes in a cluster (it can be used locally as well, - but for this we recommend joblib instead) install pyDVL using: - - ```shell - pip install pyDVL[ray] - ``` - - see [[getting-started#ray]] for more details on how to use it. - -- `memcached`: - - If you want to use [Memcached](https://memcached.org/) for caching - utility evaluations, use: - - ```shell - pip install pyDVL[memcached] - ``` - - This installs [pymemcache](https://github.com/pinterest/pymemcache) additionally. diff --git a/docs/getting-started/methods.md b/docs/getting-started/methods.md new file mode 100644 index 000000000..8f6da70c1 --- /dev/null +++ b/docs/getting-started/methods.md @@ -0,0 +1,65 @@ +--- +title: Methods +alias: + name: methods + text: Methods +--- + +We currently implement the following methods: + +## Data valuation + +- [**LOO**][pydvl.value.loo.compute_loo]. + +- [**Permutation Shapley**][pydvl.value.shapley.montecarlo.permutation_montecarlo_shapley] + (also called **ApproxShapley**) [@castro_polynomial_2009]. + +- [**TMCS**][pydvl.value.shapley.compute_shapley_values] + [@ghorbani_data_2019]. + +- [**Data Banzhaf**][pydvl.value.semivalues.compute_banzhaf_semivalues] + [@wang_data_2022]. + +- [**Beta Shapley**][pydvl.value.semivalues.compute_beta_shapley_semivalues] + [@kwon_beta_2022]. + +- [**CS-Shapley**][pydvl.value.shapley.classwise.compute_classwise_shapley_values] + [@schoch_csshapley_2022]. + +- [**Least Core**][pydvl.value.least_core.montecarlo.montecarlo_least_core] + [@yan_if_2021]. + +- [**Owen Sampling**][pydvl.value.shapley.owen.owen_sampling_shapley] + [@okhrati_multilinear_2021]. + +- [**Data Utility Learning**][pydvl.utils.utility.DataUtilityLearning] + [@wang_improving_2022]. 
+ +- [**kNN-Shapley**][pydvl.value.shapley.knn.knn_shapley] + [@jia_efficient_2019a]. + +- [**Group Testing**][pydvl.value.shapley.gt.group_testing_shapley] + [@jia_efficient_2019] + +- [**Data-OOB**][pydvl.value.oob.compute_data_oob] + [@kwon_dataoob_2023]. + +## Influence functions + +- [**CG Influence**][pydvl.influence.torch.CgInfluence]. + [@koh_understanding_2017]. + +- [**Direct Influence**][pydvl.influence.torch.DirectInfluence] + [@koh_understanding_2017]. + +- [**LiSSA**][pydvl.influence.torch.LissaInfluence] + [@agarwal_secondorder_2017]. + +- [**Arnoldi Influence**][pydvl.influence.torch.ArnoldiInfluence] + [@schioppa_scaling_2022]. + +- [**EKFAC Influence**][pydvl.influence.torch.EkfacInfluence] + [@george_fast_2018;@martens_optimizing_2015]. + +- [**Nyström Influence**][pydvl.influence.torch.NystroemSketchInfluence], based + on the ideas in [@hataya_nystrom_2023] for bi-level optimization. diff --git a/docs/index.md b/docs/index.md index fb6408b9e..0da898ebf 100644 --- a/docs/index.md +++ b/docs/index.md @@ -5,30 +5,55 @@ title: Home # The python library for data valuation pyDVL collects algorithms for data valuation and influence function computation. -It runs most of them in parallel either locally or in a cluster and supports -distributed caching of results. +For the full list see [[methods]]. It supports out-of-core and distributed +computation, as well as local or distributed caching of results. -If you're a first time user of pyDVL, we recommend you to go through the -[[getting-started]] and [[installation]] guides. +If you're a first time user of pyDVL, we recommend you to go through +[[getting-started]]. -::cards:: cols=2 -- title: Installation - content: Steps to install and requirements - url: getting-started/installation.md +
+ +- :fontawesome-solid-toolbox:{ .lg .middle } + [[getting-started|__Getting started__]] + + --- + Steps to install and requirements + +- :fontawesome-regular-images:{ .lg .middle } + [[examples|__Example gallery__]] + + --- + + Notebooks with worked-out examples of data valuation and influence functions + +- :fontawesome-solid-scale-unbalanced:{ .lg .middle } + [[data-valuation|__Data valuation__]] + + --- -- title: Data valuation - content: > Basics of data valuation and description of the main algorithms - url: value/ -- title: Influence Function - content: > +- :fontawesome-solid-scale-unbalanced-flip:{ .lg .middle } + [[influence-function|__Influence Function__]] + + --- + An introduction to the influence function and its computation with pyDVL - url: influence/ -- title: Browse the API - content: Full documentation of the API - url: api/pydvl/ +- :material-cog:{ .lg .middle } + [[methods|__Supported methods__]] + + --- + + List of all methods implemented with references. + + +- :fontawesome-regular-file-code:{ .lg .middle } + [__API Reference__](api/pydvl/) + + --- + + Full documentation of the API -::/cards:: +
diff --git a/docs/influence/index.md b/docs/influence/index.md index 8e89108c7..3570b52da 100644 --- a/docs/influence/index.md +++ b/docs/influence/index.md @@ -1,7 +1,7 @@ --- title: The influence function alias: - name: influence-values + name: influence-function text: Computing Influence Values --- @@ -24,7 +24,7 @@ intractable for large models like deep neural networks. Much of the recent research tackles this issue using approximations, like a Neuman series [@agarwal_secondorder_2017], with the most successful solution using a low-rank approximation that iteratively finds increasing eigenspaces of the Hessian -[@schioppa_scaling_2021]. +[@schioppa_scaling_2022]. pyDVL implements several methods for the efficient computation of the IF for machine learning. In the examples we document some of the difficulties that can diff --git a/docs/influence/influence_function_model.md b/docs/influence/influence_function_model.md index a126efdc5..0a424e918 100644 --- a/docs/influence/influence_function_model.md +++ b/docs/influence/influence_function_model.md @@ -1,7 +1,9 @@ In almost every practical application it is not possible to construct, even less -invert the complete Hessian in memory. pyDVL offers several implementations of the interface -[InfluenceFunctionModel][pydvl.influence.base_influence_function_model.InfluenceFunctionModel], which do not compute -the full Hessian (in contrast to [DirectInfluence][pydvl.influence.torch.influence_function_model.DirectInfluence]). +invert the complete Hessian in memory. pyDVL offers several implementations of +the interface [InfluenceFunctionModel +][pydvl.influence.base_influence_function_model.InfluenceFunctionModel], +which do not compute the full Hessian (in contrast to [DirectInfluence +][pydvl.influence.torch.influence_function_model.DirectInfluence]). ### Conjugate Gradient @@ -11,26 +13,47 @@ method that does not require the explicit inversion of the Hessian. 
Instead, it only requires the calculation of Hessian-vector products, making it a good choice for large datasets or models with many parameters. It is nevertheless much slower to converge than the direct inversion method and not as accurate. + More info on the theory of conjugate gradient can be found on -[Wikipedia](https://en.wikipedia.org/wiki/Conjugate_gradient_method). +[Wikipedia](https://en.wikipedia.org/wiki/Conjugate_gradient_method), or in +text books such as [@trefethen_numerical_1997, Lecture 38]. + +pyDVL also implements a stable block variant of the conjugate +gradient method, defined in [@ji_breakdownfree_2017], which solves several +right hand sides simultaneously. + +Optionally, the user can provide a pre-conditioner to improve convergence, such +as a [Jacobi pre-conditioner +][pydvl.influence.torch.pre_conditioner.JacobiPreConditioner], which +is a simple [diagonal pre-conditioner]( +https://en.wikipedia.org/wiki/Preconditioner#Jacobi_(or_diagonal)_preconditioner) +based on Hutchinson's diagonal estimator [@bekas_estimator_2007], +or a [Nyström approximation based pre-conditioner +][pydvl.influence.torch.pre_conditioner.NystroemPreConditioner], +described in [@frangella_randomized_2023]. ```python from pydvl.influence.torch import CgInfluence +from pydvl.influence.torch.pre_conditioner import NystroemPreConditioner if_model = CgInfluence( model, loss, hessian_regularization=0.0, - x0=None, rtol=1e-7, atol=1e-7, maxiter=None, + use_block_cg=True, + pre_conditioner=NystroemPreConditioner(rank=10) ) +if_model.fit(train_loader) ``` -The additional optional parameters `x0`, `rtol`, `atol`, and `maxiter` are -respectively the initial guess for the solution, the relative -tolerance, the absolute tolerance, and the maximum number of iterations. 
+The additional optional parameters `rtol`, `atol`, `maxiter`, `use_block_cg` and +`pre_conditioner` are respectively, the relative +tolerance, the absolute tolerance, the maximum number of iterations, +a flag indicating whether to use block variant of cg and an optional +pre-conditioner. ### Linear time Stochastic Second-Order Approximation (LiSSA) @@ -62,6 +85,7 @@ if_model = LissaInfluence( h0=None, rtol=1e-4, ) +if_model.fit(train_loader) ``` with the additional optional parameters `maxiter`, `dampen`, `scale`, `h0`, and @@ -83,7 +107,7 @@ approximated by where \(D\) is a diagonal matrix with the top (in absolute value) eigenvalues of the Hessian and \(V\) contains the corresponding eigenvectors. See also -[@schioppa_scaling_2021]. +[@schioppa_scaling_2022]. ```python from pydvl.influence.torch import ArnoldiInfluence @@ -94,17 +118,26 @@ if_model = ArnoldiInfluence( rank_estimate=10, tol=1e-6, ) +if_model.fit(train_loader) ``` -These implementations represent the calculation logic on in memory tensors. To scale up to large collection -of data, we map these influence function models over these collections. For a detailed discussion see the -documentation page [Scaling Computation](scaling_computation.md). ### Eigenvalue Corrected K-FAC -K-FAC, short for Kronecker-Factored Approximate Curvature, is a method that approximates the Fisher information matrix [FIM](https://en.wikipedia.org/wiki/Fisher_information) of a model. It is possible to show that for classification models with appropriate loss functions the FIM is equal to the Hessian of the model’s loss over the dataset. In this restricted but nonetheless important context K-FAC offers an efficient way to approximate the Hessian and hence the influence scores. -For more info and details refer to the original paper [@martens2015optimizing]. - -The K-FAC method is implemented in the class [EkfacInfluence](pydvl/influence/torch/influence_function_model.py). 
The following code snippet shows how to use the K-FAC method to calculate the influence function of a model. Note that, in contrast to the other methods for influence function calculation, K-FAC does not require the loss function as an input. This is because the current implementation is only applicable to classification models with a cross entropy loss function. +K-FAC, short for Kronecker-Factored Approximate Curvature, is a method that +approximates the Fisher information matrix [FIM](https://en.wikipedia.org/wiki/Fisher_information) of a model. +It is possible to show that for classification models with appropriate loss +functions the FIM is equal to the Hessian of the model’s loss over the dataset. +In this restricted but nonetheless important context K-FAC offers an efficient +way to approximate the Hessian and hence the influence scores. +For more info and details refer to the original paper [@martens_optimizing_2015]. + +The K-FAC method is implemented in the class [EkfacInfluence +][pydvl.influence.torch.influence_function_model.EkfacInfluence]. +The following code snippet shows how to use the K-FAC method to calculate the +influence function of a model. Note that, in contrast to the other methods for +influence function calculation, K-FAC does not require the loss function as an +input. This is because the current implementation is only applicable to +classification models with a cross entropy loss function. ```python from pydvl.influence.torch import EkfacInfluence @@ -112,10 +145,23 @@ if_model = EkfacInfluence( model, hessian_regularization=0.0, ) +if_model.fit(train_loader) ``` -Upon initialization, the K-FAC method will parse the model and extract which layers require grad and which do not. Then it will only calculate the influence scores for the layers that require grad. 
The current implementation of the K-FAC method is only available for linear layers, and therefore if the model contains non-linear layers that require gradient the K-FAC method will raise a NotImplementedLayerRepresentationException. - -A further improvement of the K-FAC method is the Eigenvalue Corrected K-FAC (EKFAC) method [@george2018fast], which allows to further re-fit the eigenvalues of the Hessian, thus providing a more accurate approximation. On top of the K-FAC method, the EKFAC method is implemented by setting `update_diagonal=True` when initialising [EkfacInfluence](pydvl/influence/torch/influence_function_model.py). The following code snippet shows how to use the EKFAC method to calculate the influence function of a model. +Upon initialization, the K-FAC method will parse the model and extract which +layers require grad and which do not. Then it will only calculate the influence +scores for the layers that require grad. The current implementation of the +K-FAC method is only available for linear layers, and therefore if the model +contains non-linear layers that require gradient the K-FAC method will raise a +NotImplementedLayerRepresentationException. + +A further improvement of the K-FAC method is the Eigenvalue Corrected +K-FAC (EKFAC) method [@george_fast_2018], which allows one to further re-fit the +eigenvalues of the Hessian, thus providing a more accurate approximation. +On top of the K-FAC method, the EKFAC method is implemented by setting +`update_diagonal=True` when initialising [EkfacInfluence +][pydvl.influence.torch.influence_function_model.EkfacInfluence]. +The following code snippet shows how to use the EKFAC method to calculate the +influence function of a model. 
```python from pydvl.influence.torch import EkfacInfluence @@ -125,4 +171,43 @@ if_model = EkfacInfluence( hessian_regularization=0.0, ) if_model.fit(train_loader) -``` \ No newline at end of file +``` + +### Nyström Sketch-and-Solve + +This approximation is based on a Nyström low-rank approximation of the form + +\begin{align*} +H_{\text{nys}} &= (H\Omega)(\Omega^TH\Omega)^{\dagger}(H\Omega)^T \\\ +&= U \Lambda U^T, +\end{align*} + +where $(\cdot)^{\dagger}$ denotes the [Moore-Penrose inverse]( +https://en.wikipedia.org/wiki/Moore%E2%80%93Penrose_inverse), +in combination with the [Sherman–Morrison–Woodbury formula]( +https://en.wikipedia.org/wiki/Woodbury_matrix_identity) to calculate the +action of its inverse: + +\begin{equation*} +(H_{\text{nys}} + \lambda I)^{-1}x = U(\Lambda+\lambda I)U^Tx + +\frac{1}{\lambda}(I−UU^T)x, +\end{equation*} + +see also [@hataya_nystrom_2023] and [@frangella_randomized_2023]. The essential +parameter is the rank of the approximation. + +```python +from pydvl.influence.torch import NystroemSketchInfluence +if_model = NystroemSketchInfluence( + model, + loss, + rank=10, + hessian_regularization=0.0, +) +if_model.fit(train_loader) +``` + +These implementations represent the calculation logic on in memory tensors. +To scale up to large collection of data, we map these influence function models +over these collections. For a detailed discussion see the +documentation page [Scaling Computation](scaling_computation.md). diff --git a/docs/influence/scaling_computation.md b/docs/influence/scaling_computation.md index f21d8700e..b8ffbe98f 100644 --- a/docs/influence/scaling_computation.md +++ b/docs/influence/scaling_computation.md @@ -1,3 +1,9 @@ +--- +title: Scaling Influence Computation +--- + +# Scaling Influence Computation + The implementations of [InfluenceFunctionModel][pydvl.influence.base_influence_function_model.InfluenceFunctionModel] provide a convenient way to calculate influences for in memory tensors. 
diff --git a/docs/overrides/partials/integrations/analytics/simpleanalytics.html b/docs/overrides/partials/integrations/analytics/simpleanalytics.html new file mode 100644 index 000000000..37aee7ffa --- /dev/null +++ b/docs/overrides/partials/integrations/analytics/simpleanalytics.html @@ -0,0 +1,18 @@ +{% set site_domain = config.site_url | replace('https://', '') | replace('http://', '') | trim('/') %} +{% set dev_domain = config.dev_addr | replace('https://', '') | replace('http://', '') | trim('/') %} +{% set develop = site_domain.startswith(dev_domain) %} +{% if not develop %} + {% set hostname = config.extra.analytics.hostname | default(site_domain) %} + {% set script_domain = config.extra.analytics.script_domain | default('scripts.simpleanalyticscdn.com') %} + {% set noscript_domain = config.extra.analytics.noscript_domain | default('queue.simpleanalyticscdn.com') %} + {% set collect_dnt = config.extra.analytics.collect_dnt | default('false') %} + + +{% endif %} diff --git a/docs/value/img/classwise-shapley-density.svg b/docs/value/img/classwise-shapley-density.svg index 44d954546..03f9dda56 100644 --- a/docs/value/img/classwise-shapley-density.svg +++ b/docs/value/img/classwise-shapley-density.svg @@ -1 +1,3 @@ - \ No newline at end of file +version https://git-lfs.github.com/spec/v1 +oid sha256:691145b1de07c07014ebfd6293e7683e180da3e6bcc880661e311b3e05ef8f8e +size 105919 diff --git a/docs/value/img/classwise-shapley-discounted-utility-function.svg b/docs/value/img/classwise-shapley-discounted-utility-function.svg index 70ed7ab58..79f4cb08c 100644 --- a/docs/value/img/classwise-shapley-discounted-utility-function.svg +++ b/docs/value/img/classwise-shapley-discounted-utility-function.svg @@ -1 +1,3 @@ - \ No newline at end of file +version https://git-lfs.github.com/spec/v1 +oid sha256:6c09c0121e90dfe413e3e9ed694f2838443166e3a05b97c4968bdcf0e77e03fe +size 2957956 diff --git a/docs/value/img/classwise-shapley-metric-auc-cv.svg 
b/docs/value/img/classwise-shapley-metric-auc-cv.svg index 3ddc5f5a4..a18309b91 100644 --- a/docs/value/img/classwise-shapley-metric-auc-cv.svg +++ b/docs/value/img/classwise-shapley-metric-auc-cv.svg @@ -1 +1,3 @@ - \ No newline at end of file +version https://git-lfs.github.com/spec/v1 +oid sha256:69f275b582bf6f62eeacafd1822e840ba279e8aceb630a49f54e4bd0ba3d0510 +size 29750 diff --git a/docs/value/img/classwise-shapley-metric-auc-mean.svg b/docs/value/img/classwise-shapley-metric-auc-mean.svg index 197ada82b..caf1afe8c 100644 --- a/docs/value/img/classwise-shapley-metric-auc-mean.svg +++ b/docs/value/img/classwise-shapley-metric-auc-mean.svg @@ -1 +1,3 @@ - \ No newline at end of file +version https://git-lfs.github.com/spec/v1 +oid sha256:6e3f97d4ebc34387bd48b21185d45208320dac45cd69e521028d4fa064c2adf5 +size 29066 diff --git a/docs/value/img/classwise-shapley-metric-wad-cv.svg b/docs/value/img/classwise-shapley-metric-wad-cv.svg index 696226e83..de848ad3b 100644 --- a/docs/value/img/classwise-shapley-metric-wad-cv.svg +++ b/docs/value/img/classwise-shapley-metric-wad-cv.svg @@ -1 +1,3 @@ - \ No newline at end of file +version https://git-lfs.github.com/spec/v1 +oid sha256:64f07361285b424d6f1feb4d3d7f906fbbc973ffa04af63979bbded6922ed6ef +size 32948 diff --git a/docs/value/img/classwise-shapley-metric-wad-mean.svg b/docs/value/img/classwise-shapley-metric-wad-mean.svg index 7f74a384a..316ca522b 100644 --- a/docs/value/img/classwise-shapley-metric-wad-mean.svg +++ b/docs/value/img/classwise-shapley-metric-wad-mean.svg @@ -1 +1,3 @@ - \ No newline at end of file +version https://git-lfs.github.com/spec/v1 +oid sha256:1d0f374b656541a76a8948eade2baa0d09cec7acbafc62198f3564d77c64095f +size 34713 diff --git a/docs/value/img/classwise-shapley-roc-auc-logistic-regression.svg b/docs/value/img/classwise-shapley-roc-auc-logistic-regression.svg index 0ec200f83..b768d5c05 100644 --- a/docs/value/img/classwise-shapley-roc-auc-logistic-regression.svg +++ 
b/docs/value/img/classwise-shapley-roc-auc-logistic-regression.svg @@ -1 +1,3 @@ - \ No newline at end of file +version https://git-lfs.github.com/spec/v1 +oid sha256:df411d298d1f613731117f2048d7b35e3bc7da35ead106b5636bc763f41d009b +size 130380 diff --git a/docs/value/img/classwise-shapley-weighted-accuracy-drop-logistic-regression-to-logistic-regression.svg b/docs/value/img/classwise-shapley-weighted-accuracy-drop-logistic-regression-to-logistic-regression.svg index 1071d5f0b..9b4cd3b84 100644 --- a/docs/value/img/classwise-shapley-weighted-accuracy-drop-logistic-regression-to-logistic-regression.svg +++ b/docs/value/img/classwise-shapley-weighted-accuracy-drop-logistic-regression-to-logistic-regression.svg @@ -1 +1,3 @@ - \ No newline at end of file +version https://git-lfs.github.com/spec/v1 +oid sha256:89dbc3d228dc4ccb252f7dae850e2ad55f90620210556de7e28aaf9b419e1735 +size 272471 diff --git a/docs/value/img/classwise-shapley-weighted-accuracy-drop-logistic-regression-to-mlp.svg b/docs/value/img/classwise-shapley-weighted-accuracy-drop-logistic-regression-to-mlp.svg index 85a3244d8..425478819 100644 --- a/docs/value/img/classwise-shapley-weighted-accuracy-drop-logistic-regression-to-mlp.svg +++ b/docs/value/img/classwise-shapley-weighted-accuracy-drop-logistic-regression-to-mlp.svg @@ -1 +1,3 @@ - \ No newline at end of file +version https://git-lfs.github.com/spec/v1 +oid sha256:f29ed746a2eeadfa5a5f167f771bc0f89754ce872bd80fb8ca4c7f39c2fa6e89 +size 332792 diff --git a/docs/value/img/mclc-best-removal-10k-natural.svg b/docs/value/img/mclc-best-removal-10k-natural.svg index 360e932f9..9f17ae566 100644 --- a/docs/value/img/mclc-best-removal-10k-natural.svg +++ b/docs/value/img/mclc-best-removal-10k-natural.svg @@ -1 +1,3 @@ -0.000.050.100.150.200.250.300.35Percentage Removal0.9700.9750.9800.9850.9900.995AccuracyTMC ShapleyGroup Testing ShapleyLeast CoreLeave One OutRandom \ No newline at end of file +version https://git-lfs.github.com/spec/v1 +oid 
sha256:2048e3b5e0a8d6d4e363a4e8288c14fff7368eb3f3e3ab76f85deefa0c477ded +size 19955 diff --git a/docs/value/img/mclc-worst-removal-10k-natural.svg b/docs/value/img/mclc-worst-removal-10k-natural.svg index da04f1caa..4c367f0f6 100644 --- a/docs/value/img/mclc-worst-removal-10k-natural.svg +++ b/docs/value/img/mclc-worst-removal-10k-natural.svg @@ -1 +1,3 @@ -0.000.050.100.150.200.250.300.35Percentage Removal0.9900.9920.9940.9960.998AccuracyTMC ShapleyGroup Testing ShapleyLeast CoreLeave One OutRandom \ No newline at end of file +version https://git-lfs.github.com/spec/v1 +oid sha256:c03b777a2169d6de36ba654eb0187908ece2768397c04960a993483f7312b3eb +size 19144 diff --git a/docs/value/index.md b/docs/value/index.md index 2440428d3..6ddc974d0 100644 --- a/docs/value/index.md +++ b/docs/value/index.md @@ -7,9 +7,11 @@ alias: # Data valuation -!!! Note - If you want to jump right into the steps to compute values, skip ahead - to [Computing data values](#computing-data-values). +!!! Info + If you want to jump right into it, skip ahead to [Computing data + values][computing-data-values]. If you want a quick list of applications, + see [[data-valuation-applications]]. For a list of all algorithms + implemented in pyDVL, see [[methods]]. **Data valuation** is the task of assigning a number to each element of a training set which reflects its contribution to the final performance of some @@ -35,7 +37,7 @@ function of three factors: pyDVL collects algorithms for the computation of data values in this sense, mostly those derived from cooperative game theory. The methods can be found in -the package [pydvl.value][pydvl.value] , with support from modules +the package [[pydvl.value]], with support from modules [pydvl.utils.dataset][pydvl.utils.dataset] and [pydvl.utils.utility][pydvl.utils.utility], as detailed below. @@ -44,7 +46,7 @@ and [pydvl.utils.utility][pydvl.utils.utility], as detailed below. [the difficulties using data values][problems-of-data-values]. 
There are three main families of methods for data valuation: game-theoretic, -influence-based and intrinsic. As of v0.7.0 pyDVL supports the first two. Here, +influence-based and intrinsic. As of v0.8.1 pyDVL supports the first two. Here, we focus on game-theoretic concepts and refer to the main documentation on the [influence funtion][the-influence-function] for the second. @@ -55,7 +57,7 @@ values](shapley.md]) [@ghorbani_data_2019], [@kwon_efficient_2021], [@schoch_csshapley_2022], their generalization to so-called [semi-values](semi-values.md) by [@kwon_beta_2022] and [@wang_data_2022], and [the Core](the-core.md) [@yan_if_2021]. All of these are implemented -in pyDVL. +in pyDVL. For a full list see [[methods]] In these methods, data points are considered players in a cooperative game whose outcome is the performance of the model when trained on subsets @@ -83,34 +85,7 @@ among all samples, failing to identify repeated ones as unnecessary, with e.g. a zero value. -## Applications of data valuation - -Many applications are touted for data valuation, but the results can be -inconsistent. Values have a strong dependency on the training procedure and the -performance metric used. For instance, accuracy is a poor metric for imbalanced -sets and this has a stark effect on data values. Some models exhibit great -variance in some regimes and this again has a detrimental effect on values. - -Nevertheless, some of the most promising applications are: - -* Cleaning of corrupted data. -* Pruning unnecessary or irrelevant data. -* Repairing mislabeled data. -* Guiding data acquisition and annotation (active learning). -* Anomaly detection and model debugging and interpretation. - -Additionally, one of the motivating applications for the whole field is that of -data markets: a marketplace where data owners can sell their data to interested -parties. In this setting, data valuation can be key component to determine the -price of data. 
Algorithm-agnostic methods like LAVA [@just_lava_2023] are -particularly well suited for this, as they use the Wasserstein distance between -a vendor's data and the buyer's to determine the value of the former. - -However, this is a complex problem which can face practical banal problems like -the fact that data owners may not wish to disclose their data for valuation. - - -## Computing data values +## Computing data values { #computing-data-values } Using pyDVL to compute data values is a simple process that can be broken down into three steps: @@ -202,9 +177,9 @@ utility = Utility(model, dataset, "explained_variance") `Utility` will wrap the `fit()` method of the model to cache its results. This greatly reduces computation times of Monte Carlo methods. Because of how caching is implemented, it is important not to reuse `Utility` objects for different -datasets. You can read more about [setting up the cache][setting-up-the-cache] -in the installation guide and the documentation -of the [caching][pydvl.utils.caching] module. +datasets. You can read more about [setting up the cache][getting-started-cache] in the +installation guide, and in the documentation of the +[caching][pydvl.utils.caching] module. #### Using custom scorers @@ -249,7 +224,7 @@ from sklearn.linear_model import LinearRegression, LogisticRegression from sklearn.datasets import load_iris dataset = Dataset.from_sklearn(load_iris()) -u = Utility(LogisticRegression(), dataset, enable_cache=False) +u = Utility(LogisticRegression(), dataset) training_budget = 3 wrapped_u = DataUtilityLearning(u, training_budget, LinearRegression()) @@ -276,7 +251,7 @@ $$v_u(i) = u(D) − u(D_{-i}).$$ For notational simplicity, we consider the valuation function as defined over the indices of the dataset $D$, and $i \in D$ is the index of the sample, $D_{-i}$ is the training set without the sample $x_i$, and $u$ is the utility -function. +function. See [the section on notation][notation-and-nomenclature] for more. 
For the purposes of data valuation, this is rarely useful beyond serving as a baseline for benchmarking. Although in some benchmarks it can perform @@ -306,6 +281,26 @@ There are a number of factors that affect how useful values can be for your project. In particular, regression can be especially tricky, but the particular nature of every (non-trivial) ML problem can have an effect: +* **Variance of the utility**: Classical applications of game theoretic value + concepts operate with deterministic utilities, as do many of the bounds in the + literature. But in ML we use an evaluation of the model on a validation set as a + proxy for the true risk. Even if the utility is bounded, its variance will + affect final values, and even more so any Monte Carlo estimates. + Several works have tried to cope with variance. [@wang_data_2022] prove that by + relaxing one of the Shapley axioms and considering the general class of + semi-values, of which Shapley is an instance, one can prove that a choice of + constant weights is the best one can do in a utility-agnostic setting. This + method, dubbed *Data Banzhaf*, is available in pyDVL as + [compute_banzhaf_semivalues][pydvl.value.semivalues.compute_banzhaf_semivalues]. + + ??? tip "Averaging repeated utility evaluations" + One workaround in pyDVL is to configure the caching system to allow multiple + evaluations of the utility for every index set. A moving average is + computed and returned once the standard error is small, see + [CachedFuncConfig][pydvl.utils.caching.config.CachedFuncConfig]. Note + however that in practice, the likelihood of cache hits is low, so one + would have to force recomputation manually somehow. + * **Unbounded utility**: Choosing a scorer for a classifier is simple: accuracy or some F-score provides a bounded number with a clear interpretation. 
However, in regression problems most scores, like $R^2$, are not bounded because @@ -336,21 +331,6 @@ nature of every (non-trivial) ML problem can have an effect: These squashed scores can prove useful in regression problems, but they can also introduce issues in the low-value regime. -* **High variance utility**: Classical applications of game theoretic value - concepts operate with deterministic utilities, but in ML we use an evaluation - of the model on a validation set as a proxy for the true risk. Even if the - utility *is* bounded, if it has high variance then values will also have high - variance, as will their Monte Carlo estimates. One workaround in pyDVL is to - configure the caching system to allow multiple evaluations of the utility for - every index set. A moving average is computed and returned once the standard - error is small, see [MemcachedConfig][pydvl.utils.config.MemcachedConfig]. - [@wang_data_2022] prove that by relaxing one of the Shapley axioms - and considering the general class of semi-values, of which Shapley is an - instance, one can prove that a choice of constant weights is the best one can - do in a utility-agnostic setting. This method, dubbed *Data Banzhaf*, is - available in pyDVL as - [compute_banzhaf_semivalues][pydvl.value.semivalues.compute_banzhaf_semivalues]. - * **Data set size**: Computing exact Shapley values is NP-hard, and Monte Carlo approximations can converge slowly. Massive datasets are thus impractical, at least with [game-theoretical methods][game-theoretical-methods]. A workaround @@ -359,12 +339,46 @@ nature of every (non-trivial) ML problem can have an effect: worked-out [example here](../examples/shapley_basic_spotify). Some algorithms also provide different sampling strategies to reduce the variance, but due to a no-free-lunch-type theorem, no single strategy can be optimal for all utilities. 
+ Finally, model specific methods like + [kNN-Shapley][pydvl.value.shapley.knn.knn_shapley] [@jia_efficient_2019a], or + altogether different and typically faster approaches like + [Data-OOB][pydvl.value.oob.compute_data_oob] [@kwon_dataoob_2023] can also be + used. * **Model size**: Since every evaluation of the utility entails retraining the whole model on a subset of the data, large models require great amounts of computation. But also, they will effortlessly interpolate small to medium datasets, leading to great variance in the evaluation of performance on the dedicated validation set. One mitigation for this problem is cross-validation, - but this would incur massive computational cost. As of v.0.7.0 there are no + but this would incur massive computational cost. As of v0.8.1 there are no facilities in pyDVL for cross-validating the utility (note that this would require cross-validating the whole value computation). + + +## Notation and nomenclature { #notation-and-nomenclature } + +!!! todo + Organize this section better and use its content consistently throughout the + documentation. + +The following notation is used throughout the documentation: + +Let $D = \{x_1, \ldots, x_n\}$ be a training set of $n$ samples. + +The utility function $u:\mathcal{D} \rightarrow \mathbb{R}$ maps subsets of $D$ +to real numbers. In pyDVL, we typically call this mapping a **score** for +consistency with sklearn, and reserve the term **utility** for the triple of +dataset $D$, model $f$ and score $u$, since they are used together to compute +the value. + +The value $v$ of the $i$-th sample in dataset $D$ wrt. utility $u$ is +denoted as $v_u(x_i)$ or simply $v(i)$. + +For any $S \subseteq D$, we denote by $S_{-i}$ the set of samples in $D$ +excluding $x_i$, and $S_{+i}$ denotes the set $S$ with $x_i$ added. + +The marginal utility of adding sample $x_i$ to a subset $S$ is denoted as +$\delta(i) := u(S_{+i}) - u(S)$. 
+ +The set $D_{-i}^{(k)}$ contains all subsets of $D$ of size $k$ that do not +include sample $x_i$. diff --git a/docs/value/notation.md b/docs/value/notation.md deleted file mode 100644 index 83054d5e6..000000000 --- a/docs/value/notation.md +++ /dev/null @@ -1,31 +0,0 @@ ---- -title: Notation for valuation ---- - -# Notation for valuation - -!!! todo - Organize this page better and use its content consistently throughout the - documentation. - -The following notation is used throughout the documentation: - -Let $D = \{x_1, \ldots, x_n\}$ be a training set of $n$ samples. - -The utility function $u:\mathcal{D} \rightarrow \mathbb{R}$ maps subsets of $D$ -to real numbers. In pyDVL, we typically call this mappin a **score** for -consistency with sklearn, and reserve the term **utility** for the triple of -dataset $D$, model $f$ and score $u$, since they are used together to compute -the value. - -The value $v$ of the $i$-th sample in dataset $D$ wrt. utility $u$ is -denoted as $v_u(x_i)$ or simply $v(i)$. - -For any $S \subseteq D$, we denote by $S_{-i}$ the set of samples in $D$ -excluding $x_i$, and $S_{+i}$ denotes the set $S$ with $x_i$ added. - -The marginal utility of adding sample $x_i$ to a subset $S$ is denoted as -$\delta(i) := u(S_{+i}) - u(S)$. - -The set $D_{-i}^{(k)}$ contains all subsets of $D$ of size $k$ that do not -include sample $x_i$. diff --git a/docs/value/semi-values.md b/docs/value/semi-values.md index 2aebe0d80..408a90c28 100644 --- a/docs/value/semi-values.md +++ b/docs/value/semi-values.md @@ -27,7 +27,7 @@ rank stability in certain situations. !!! Note Shapley values are a particular case of semi-values and can therefore also - be computed with the methods described here. However, as of version 0.7.0, + be computed with the methods described here. However, as of version 0.8.1, we recommend using [compute_shapley_values][pydvl.value.shapley.compute_shapley_values] instead, in particular because it implements truncation policies for TMCS. 
@@ -141,10 +141,10 @@ of rank stability, across a range of models and datasets [@wang_data_2022]. sampling that reuses the utility computation from the last iteration when iterating over a permutation. This doubles the computation requirements (and slightly increases variance) when using permutation sampling, unless [the - cache](getting-started/installation.md#setting-up-the-cache) is enabled. - In addition, as mentioned above, + cache is enabled][getting-started-cache]. In addition, + as mentioned above, [truncation policies][pydvl.value.shapley.truncated.TruncationPolicy] are - not supported by this generic implementation (as of v0.7.0). For these + not supported by this generic implementation (as of v0.8.1). For these reasons it is preferable to use [compute_shapley_values][pydvl.value.shapley.common.compute_shapley_values] whenever not computing other semi-values. diff --git a/docs/value/the-core.md b/docs/value/the-core.md index 9c4e4bb3b..5a44a8cd1 100644 --- a/docs/value/the-core.md +++ b/docs/value/the-core.md @@ -4,48 +4,46 @@ title: The Least Core for Data Valuation # Core values -The Shapley values define a fair way to distribute payoffs amongst all -participants when they form a grand coalition. But they do not consider -the question of stability: under which conditions do all participants -form the grand coalition? Would the participants be willing to form -the grand coalition given how the payoffs are assigned, -or would some of them prefer to form smaller coalitions? - -The Core is another approach to computing data values originating -in cooperative game theory that attempts to ensure this stability. -It is the set of feasible payoffs that cannot be improved upon -by a coalition of the participants. +Shapley values define a fair way to distribute payoffs amongst all participants +(training points) when they form a grand coalition, i.e. when the model is +trained on the whole dataset. 
But they do not consider the question of +stability: under which conditions do all participants in a game form the grand +coalition? Are the payoffs distributed in such a way that prioritizes its +formation? + +The Core is another solution concept in cooperative game theory that attempts to +ensure stability in the sense that it provides the set of feasible payoffs that +cannot be improved upon by a sub-coalition. This can be interesting for some +applications of data valuation because it yields values consistent with training +on the whole dataset, avoiding the spurious selection of subsets. It satisfies the following 2 properties: - **Efficiency**: - The payoffs are distributed such that it is not possible - to make any participant better off - without making another one worse off. - $$\sum_{i\in D} v(i) = u(D)\,$$ + The payoffs are distributed such that it is not possible to make any + participant better off without making another one worse off. + $\sum_{i \in D} v(i) = u(D).$ - **Coalitional rationality**: - The sum of payoffs to the agents in any coalition S is at - least as large as the amount that these agents could earn by - forming a coalition on their own. - $$\sum_{i \in S} v(i) \geq u(S), \forall S \subset D\,$$ + The sum of payoffs to the agents in any coalition $S$ is at least as large as + the amount that these agents could earn by forming a coalition on their own. + $\sum_{i \in S} v(i) \geq u(S), \forall S \subset D.$ -The second property states that the sum of payoffs to the agents -in any subcoalition $S$ is at least as large as the amount that -these agents could earn by forming a coalition on their own. +The Core was first introduced into data valuation by [@yan_if_2021], in the +following form. ## Least Core values -Unfortunately, for many cooperative games the Core may be empty. 
-By relaxing the coalitional rationality property by a subsidy $e \gt 0$, -we are then able to find approximate payoffs: +Unfortunately, for many cooperative games the Core may be empty. By relaxing the +coalitional rationality property by a subsidy $e \gt 0$, we are then able to +find approximate payoffs: $$ \sum_{i\in S} v(i) + e \geq u(S), \forall S \subset D, S \neq \emptyset \ ,$$ -The least core value $v$ of the $i$-th sample in dataset $D$ wrt. -utility $u$ is computed by solving the following Linear Program: +The Least Core (LC) values $\{v\}$ for utility $u$ are computed by solving the +following linear program: $$ \begin{array}{lll} @@ -55,11 +53,20 @@ $$ \end{array} $$ +Note that solving this program yields a _set of solutions_ $\{v_j:N \rightarrow +\mathbb{R}\}$, whereas the Shapley value is a single function $v$. In order to +obtain a single valuation to use, one breaks ties by solving a quadratic program +to select the $v$ in the LC with the smallest $\ell_2$ norm. This is called the +_egalitarian least core_. + ## Exact Least Core -This first algorithm is just a verbatim implementation of the definition. -As such it returns as exact a value as the utility function allows -(see what this means in Problems of Data Values][problems-of-data-values]). +This first algorithm is just a verbatim implementation of the definition, in +[compute_least_core_values][pydvl.value.least_core.compute_least_core_values]. +It computes all constraints for the linear problem by evaluating the utility on +every subset of the training data, and returns as exact a value as the utility +function allows (see what this means in [Problems of Data +Values][problems-of-data-values]). ```python from pydvl.value import compute_least_core_values @@ -72,18 +79,20 @@ values = compute_least_core_values(utility, mode="exact") Because the number of subsets $S \subseteq D \setminus \{i\}$ is $2^{ | D | - 1 }$, one typically must resort to approximations. 
-The simplest approximation consists in using a fraction of all subsets for the -constraints. [@yan_if_2021] show that a quantity of order -$\mathcal{O}((n - \log \Delta ) / \delta^2)$ is enough to obtain a so-called -$\delta$-*approximate least core* with high probability. I.e. the following -property holds with probability $1-\Delta$ over the choice of subsets: +The simplest one consists in using a fraction of all subsets for the constraints. +[@yan_if_2021] show that a quantity of order $\mathcal{O}((n - \log \Delta ) / +\delta^2)$ is enough to obtain a so-called $\delta$-*approximate least core* +with high probability. I.e. the following property holds with probability +$1-\Delta$ over the choice of subsets: $$ \mathbb{P}_{S\sim D}\left[\sum_{i\in S} v(i) + e^{*} \geq u(S)\right] \geq 1 - \delta, $$ -where $e^{*}$ is the optimal least core subsidy. +where $e^{*}$ is the optimal least core subsidy. This approximation is +also implemented in +[compute_least_core_values][pydvl.value.least_core.compute_least_core_values]: ```python from pydvl.value import compute_least_core_values diff --git a/docs_includes/abbreviations.md b/docs_includes/abbreviations.md index a89425885..a584b73d8 100644 --- a/docs_includes/abbreviations.md +++ b/docs_includes/abbreviations.md @@ -1,20 +1,29 @@ +*[AM]: Arnoldi Method +*[BCG]: Block Conjugate Gradient +*[CG]: Conjugate Gradient *[CSP]: Constraint Satisfaction Problem *[CV]: Coefficient of Variation *[CWS]: Class-wise Shapley *[DUL]: Data Utility Learning +*[EKFAC]: Eigenvalue-corrected Kronecker-Factored Approximate Curvature *[GT]: Group Testing *[IF]: Influence Function *[iHVP]: inverse Hessian-vector product +*[K-FAC]: Kronecker-Factored Approximate Curvature *[LC]: Least Core -*[LiSSA]: Linear-time Stochastic Second-order Algorithm *[LOO]: Leave-One-Out +*[LiSSA]: Linear-time Stochastic Second-order Algorithm *[MCLC]: Monte Carlo Least Core *[MCS]: Monte Carlo Shapley -*[ML]: Machine Learning *[MLP]: Multi-Layer Perceptron 
*[MLRC]: Machine Learning Reproducibility Challenge +*[ML]: Machine Learning *[MSE]: Mean Squared Error +*[NLRA]: Nyström Low-Rank Approximation +*[OOB]: Out-of-Bag *[PCA]: Principal Component Analysis +*[PBCG]: Preconditioned Block Conjugate Gradient +*[PCG]: Preconditioned Conjugate Gradient *[ROC]: Receiver Operating Characteristic *[SV]: Shapley Value *[TMCS]: Truncated Monte Carlo Shapley diff --git a/logo.svg b/logo.svg index 3a846f7e2..e66d4ecdd 100644 --- a/logo.svg +++ b/logo.svg @@ -1,201 +1 @@ - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - + \ No newline at end of file diff --git a/mkdocs.yml b/mkdocs.yml index dace2fa73..1e4738720 100644 --- a/mkdocs.yml +++ b/mkdocs.yml @@ -1,11 +1,49 @@ site_name: "pyDVL" site_dir: "docs_build" -site_url: "https://aai-institute.github.io/pyDVL/" +site_url: "https://pydvl.org" repo_name: "aai-institute/pyDVL" repo_url: "https://github.com/aai-institute/pyDVL" copyright: "Copyright © AppliedAI Institute gGmbH" remote_branch: gh-pages +nav: + - Home: index.md + - Getting Started: + - getting-started/index.md + - First steps: getting-started/first-steps.md + - Applications: getting-started/applications.md + - Benchmarking: getting-started/benchmarking.md + - Methods: getting-started/methods.md + - Advanced usage: getting-started/advanced-usage.md + - Glossary: getting-started/glossary.md + - Data Valuation: + - value/index.md + - Shapley values: value/shapley.md + - Semi-values: value/semi-values.md + - The Core: value/the-core.md + - Class-wise Shapley: value/classwise-shapley.md + - The Influence Function: + - influence/index.md + - Influence Function Model: influence/influence_function_model.md + - Scaling Computation: influence/scaling_computation.md + - Examples: + - examples/index.md + - Data Valuation: + - Shapley values: examples/shapley_basic_spotify.ipynb + - KNN Shapley: examples/shapley_knn_flowers.ipynb + - Data utility learning: examples/shapley_utility_learning.ipynb + - Least 
Core: examples/least_core_basic.ipynb + - Data OOB: examples/data_oob.ipynb + - Influence Function: + - For CNNs: examples/influence_imagenet.ipynb + - For mislabeled data: examples/influence_synthetic.ipynb + - For outlier detection: examples/influence_wine.ipynb + - For language models: examples/influence_sentiment_analysis.ipynb + - Code: + - API Reference: api/pydvl/ + - Changelog: CHANGELOG.md + - Development Guidelines: CONTRIBUTING.md + watch: - src/pydvl - notebooks @@ -13,9 +51,11 @@ watch: hooks: - build_scripts/copy_notebooks.py - build_scripts/copy_changelog.py + - build_scripts/copy_contributing_guide.py - build_scripts/modify_binder_link.py plugins: + - search - autorefs - glightbox: touchNavigation: true @@ -33,10 +73,8 @@ plugins: - macros - mike: canonical_version: stable - - search - section-index - alias: - use_relative_link: true verbose: true - gen-files: scripts: @@ -72,8 +110,10 @@ plugins: - https://joblib.readthedocs.io/en/stable/objects.inv - https://docs.dask.org/en/latest/objects.inv - https://distributed.dask.org/en/latest/objects.inv + - https://docs.ray.io/en/latest/objects.inv paths: [ src ] # search packages in the src folder options: + heading_level: 1 inherited_members: true docstring_style: google docstring_section_style: spacy @@ -81,11 +121,16 @@ plugins: show_bases: true members_order: source show_submodules: false - show_signature_annotations: false + separate_signature: true + show_signature_annotations: true signature_crossrefs: true merge_init_into_class: true docstring_options: ignore_init_summary: true + show_symbol_type_toc: true + show_symbol_type_heading: true + show_root_full_path: true + show_root_heading: true - bibtex: bib_file: "docs/assets/pydvl.bib" csl_file: "docs/assets/elsevier-harvard.csl" @@ -94,6 +139,8 @@ plugins: enable_creation_date: true type: iso_date fallback_to_build_date: true + - social: + cards: !ENV [CI, True] # only build in CI theme: name: material @@ -106,18 +153,26 @@ theme: - 
content.code.annotate - content.code.copy - navigation.footer -# - content.tooltips # insiders only + - content.tooltips # - navigation.indexes - navigation.instant - navigation.path -# - navigation.sections -# - navigation.tabs + - navigation.sections + - navigation.tabs - navigation.top - navigation.tracking - search.suggest - search.highlight - toc.follow - palette: # Palette toggle for light mode + palette: + # Palette toggle for automatic mode + - media: "(prefers-color-scheme)" + scheme: default + primary: teal + toggle: + icon: material/brightness-auto + name: Switch to light mode + # Palette toggle for light mode - media: "(prefers-color-scheme: light)" scheme: default primary: teal @@ -130,11 +185,11 @@ theme: primary: teal toggle: icon: material/brightness-4 - name: Switch to light mode + name: Switch to system preference extra_css: - css/extra.css - - css/neoteroi.css + - css/grid-cards.css extra_javascript: - javascripts/mathjax.js @@ -143,8 +198,7 @@ extra_javascript: extra: transferlab: - website: https://transferlab.appliedai.de - data_valuation_review: https://transferlab.appliedai.de/reviews/data-valuation + website: https://transferlab.ai/ copyright_link: https://appliedai-institute.de version: provider: mike @@ -158,15 +212,21 @@ extra: link: https://twitter.com/aai_transferlab - icon: fontawesome/brands/linkedin link: https://de.linkedin.com/company/appliedai-institute-for-europe-ggmbh + analytics: + provider: simpleanalytics + hostname: pydvl.org + script_domain: scripts.simpleanalyticscdn.com + noscript_domain: queue.simpleanalyticscdn.com + collect_dnt: true markdown_extensions: - abbr - admonition - attr_list + - def_list - footnotes - markdown_captions - md_in_html - - neoteroi.cards - codehilite - toc: permalink: True @@ -174,8 +234,8 @@ markdown_extensions: - pymdownx.tabbed: alternate_style: true - pymdownx.emoji: - emoji_index: !!python/name:materialx.emoji.twemoji - emoji_generator: !!python/name:materialx.emoji.to_svg + emoji_index: 
!!python/name:material.extensions.emoji.twemoji + emoji_generator: !!python/name:material.extensions.emoji.to_svg - pymdownx.highlight: anchor_linenums: true pygments_lang_class: true @@ -188,34 +248,3 @@ markdown_extensions: - docs_includes/abbreviations.md - pymdownx.superfences - pymdownx.details - -nav: - - Home: index.md - - Getting Started: - - Installation: getting-started/installation.md - - First steps: getting-started/first-steps.md - - Data Valuation: - - Introduction: value/index.md - - Notation: value/notation.md - - Shapley values: value/shapley.md - - Semi-values: value/semi-values.md - - The core: value/the-core.md - - Classwise Shapley: value/classwise-shapley.md - - Examples: - - Shapley values: examples/shapley_basic_spotify.ipynb - - KNN Shapley: examples/shapley_knn_flowers.ipynb - - Data utility learning: examples/shapley_utility_learning.ipynb - - Least Core: examples/least_core_basic.ipynb - - Data OOB: examples/data_oob.ipynb - - The Influence Function: - - Introduction: influence/index.md - - Influence Function Model: influence/influence_function_model.md - - Scaling Computation: influence/scaling_computation.md - - Examples: - - For CNNs: examples/influence_imagenet.ipynb - - For mislabeled data: examples/influence_synthetic.ipynb - - For outlier detection: examples/influence_wine.ipynb - - For sentiment analysis: examples/influence_sentiment_analysis.ipynb - - Code: - - Changelog: CHANGELOG.md - - API: api/pydvl/ diff --git a/notebooks/influence_imagenet.ipynb b/notebooks/influence_imagenet.ipynb index ab3bc982f..fc33dbc64 100644 --- a/notebooks/influence_imagenet.ipynb +++ b/notebooks/influence_imagenet.ipynb @@ -392,7 +392,7 @@ "source": [ "## Influence computation\n", "\n", - "Let's now calculate influences! The central interface for computing influences is [Influence][pydvl.influence.base_influence_model.Influence]. 
Since Resnet18 is quite big, we pick the conjugate gradient implementation [BatchCgInfluence][pydvl.influence.torch.BatchCgInfluence], which takes a trained [torch.nn.Module][torch.nn.Module], the training loss and the training data.\n", + "Let's now calculate influences! The central interface for computing influences is [InfluenceFunctionModel][pydvl.influence.base_influence_function_model.InfluenceFunctionModel]. Since Resnet18 is quite big, we pick the conjugate gradient implementation [CgInfluence][pydvl.influence.torch.CgInfluence], which takes a trained [torch.nn.Module][torch.nn.Module], the training loss and the training data.\n", "Other important parameters are the Hessian regularization term, which should be chosen as small as possible for the computation to converge (further details on why this is important can be found in the [Appendix](#appendix))." ] }, @@ -410,7 +410,7 @@ "cell_type": "markdown", "metadata": {}, "source": [ - "On the instantiated influence object, we can call the method [values][pydvl.influence.torch.BatchCgInfluence.values], which takes some test data and some input dataset with labels (which typically is the training data, or a subset of it). The influence type will be `up`. The other option, `perturbation`, is beyond the scope of this notebook, but more info can be found in the notebook [using the Wine dataset](influence_wine.ipynb) or in the documentation for pyDVL." + "On the instantiated influence object, we can call the method [influences][pydvl.influence.torch.CgInfluence.influences], which takes some test data and some input dataset with labels (which typically is the training data, or a subset of it). The influence type will be `up`. The other option, `perturbation`, is beyond the scope of this notebook, but more info can be found in the notebook [using the Wine dataset](influence_wine.ipynb) or in the documentation for pyDVL." 
] }, { diff --git a/notebooks/influence_wine.ipynb b/notebooks/influence_wine.ipynb index a0f46fcae..8e226f047 100644 --- a/notebooks/influence_wine.ipynb +++ b/notebooks/influence_wine.ipynb @@ -74,8 +74,8 @@ "name": "stderr", "output_type": "stream", "text": [ - "/Users/fabio/miniconda3/envs/pydvl_env/lib/python3.9/site-packages/tqdm/auto.py:21: TqdmWarning: IProgress not found. Please update jupyter and ipywidgets. See https://ipywidgets.readthedocs.io/en/stable/user_install.html\n", - " from .autonotebook import tqdm as notebook_tqdm\n" + "/opt/homebrew/Caskroom/mambaforge/base/envs/pydvl/lib/python3.8/site-packages/transformers/utils/generic.py:441: UserWarning: torch.utils._pytree._register_pytree_node is deprecated. Please use torch.utils._pytree.register_pytree_node instead.\n", + " _torch_pytree._register_pytree_node(\n" ] } ], @@ -97,6 +97,8 @@ " CgInfluence,\n", " ArnoldiInfluence,\n", " EkfacInfluence,\n", + " NystroemSketchInfluence,\n", + " LissaInfluence,\n", ")\n", "from support.shapley import load_wine_dataset\n", "from sklearn.metrics import confusion_matrix, ConfusionMatrixDisplay, f1_score\n", @@ -275,11 +277,18 @@ }, "outputs": [ { - "name": "stderr", - "output_type": "stream", - "text": [ - "Model fitting: 100%|██████████| 300/300 [00:01<00:00, 209.95it/s]\n" - ] + "data": { + "application/vnd.jupyter.widget-view+json": { + "model_id": "dc79c95e3db747dc8e8fee32dde79f22", + "version_major": 2, + "version_minor": 0 + }, + "text/plain": [ + "Model fitting: 0%| | 0/300 [00:00" ] @@ -371,7 +380,7 @@ "outputs": [ { "data": { - "image/png": 
"iVBORw0KGgoAAAANSUhEUgAAAysAAAKvCAYAAABqAVAmAAAAOXRFWHRTb2Z0d2FyZQBNYXRwbG90bGliIHZlcnNpb24zLjcuMSwgaHR0cHM6Ly9tYXRwbG90bGliLm9yZy/bCgiHAAAACXBIWXMAAA9hAAAPYQGoP6dpAABBiklEQVR4nO3deZilZ1kn/u/dne7O0p3ORjbCkgQIA8FhiWxGBGMQEQVE1FFk0RmEQYdtmDH+MEJQcBgmgxpGBRWCKAoICEoIsgkoBAiGJSYhkAQI2bdOJ53eqp7fH+c0VKq7uruW95y3Tn8+1/Ve1e/zLs9dlZyk7r6fpVprAQAA6JsV4w4AAABgVyQrAABAL0lWAACAXpKsAAAAvSRZAQAAekmyAgAA9JJkBQAA6CXJCgAA0Ev7jTuApVRVleTYJBvHHQsAAN+zLsk1bR/bjbyq9k+yekzdb22tbR5T30tmopKVDBKVq8cdBAAAOzkuyXfHHcSoVNX+Rx+58q7rbpgaVwjXVdXxyz1hmbRkZWOSfOtL983Ba41wg73x9Ac8ZNwhADDBtmdbPpMPJfveyJfV190wlW9deN8cvG60v5fevnE693nEVUdnUNWRrPTNwWtXjPxfCliu9qtV4w4BgEm2Tw382tnadZW162qkfU5ntP11yW/0AABAL0lWAACAXprIYWAAANAHU206UyMeCjfVpkfbYYdUVgAAgF5SWQEAgI5Mp2V6xKsMjLq/LqmsAAAAvaSyAgAAHZnOdEY9g2T0PXZHZQUAAOglyQoAANBLhoEBAEBHplrLVBvthPdR99cllRUAAKCXVFYAAKAjli5eHJUVAACglyQrAABALxkGBgAAHZlOy5RhYAumsgIAAPSSygoAAHTEBPvFUVkBAAB6SWUFAAA6YlPIxVFZAQAAekmyAgAA9JJhYAAA0JHp4THqPieFygoAAOzDquqMqvpCVW2sqhuq6v1VddKse/avqjdV1c1VdUdV/V1VHbWH91ZVnVVV11bVXVX10aq6/3xik6wAAEBHpoabQo76mKcfSfKmJI9OcnqSVUk+UlUHzbjn/yb5qSTPHN5/bJL37uG9/yPJf0vygiSPSnJnkvOrav+9DcwwMAAA2Ie11p4087yqnpvkhiSPSPKpqlqf5FeT/GJr7ePDe56X5JKqenRr7XOz31lVleQlSX63tfb3w7ZnJ7k+ydOS/M3exKayAgAAk2ldVR0841izl8+tH369Zfj1ERlUWz6644bW2qVJvp3kMXO84/gkR896ZkOSC3bzzE5UVgAAoCNTbXCMus+hq2ddenWSV+3u2apakeSNSf6ltfa1YfPRSba21m6bdfv1w2u7cvSMe/b2mZ1IVgAAYDIdl2TjjPMte/HMm5KcnOTUTiKaJ8kKAAB0ZMxLF29srd2+t89V1TlJnpLkca21mVWZ65KsrqpDZlVXjhpe25XrZtxz7axnLtrbmMxZAQCAfdhwieFzkjw9yY+21q6cdcuFSbYlOW3GMycluXeSz87x2iszSFhmPnNwBquCzfXMTlRWAACgI9OpTKVG3uc8vSnJLyZ5apKNVbVjTsmG1tpdrbUNVfXnSc6uqluS3J7kj5J8duZKYFV1aZIzWmvva621qnpjkldW1eUZJC+vSXJNkvfvbWCSFQAA2Le9cPj1k7Pan5fkbcM/vzSDEWZ/l2RNkvOT/NdZ95+U768kliSvT3JQkjcnOSTJZ5I8qbW2eW8Dk6wAAMA+rLW2x1LMMMF40fDYq/e01lqSM4fHgkhWAACgI9NtcIy6z0lhgj0AANBLKisAANCRqTFMsB91f11SWQEAAHpJsgIAAPSSYWAAANARw8AWR2UFAADoJZUVAADoyHSrTO95G5Ml73NSqKwAAAC9pLICAAAdMWdlcVRWAACAXpKsAAAAvWQYGAAAdGQqKzI14vrA1Eh765bKCgAA0EsqKwAA0JE2hqWLm6WLAQAAuiVZAQAAeskwMAAA6Ih9VhZHZQUAAOg
llRUAAOjIVFuRqTbipYvbSLvrlMoKAADQSyorAADQkelUpkdcH5jO5JRWVFYAAIBekqwAAAC9ZBgYAAB0xNLFi6OyAgAA9JLKCgAAdGQ8SxebYA8AANApyQoAANBLhoEBAEBHBvusjHbC+6j765LKCgAA0EsqKwAA0JHprMiUHewXTGUFAADoJckKAADQS4aBAQBAR+yzsjgqKwAAQC+prAAAQEemsyLTJtgvmMoKAADQSyorAADQkalWmWqj3aRx1P11SWUFAADoJckKAADQS4aBAQBAR6bGsIP9lAn2AAAA3VJZAQCAjky3FZke8aaQ0zaFBAAA6JZkBQAA6CXDwAAAoCMm2C+OygoAANBLKisAANCR6Yx+R/npkfbWLZUVAACgl1RWAACgI9NZkekR1wdG3V+XJuc7AQAAJopkBQAA6CXDwAAAoCNTbUWmRryD/aj769LkfCcAAMBEUVkBAICOTKcynVEvXTza/rqksgIAAPSSZAUAAOglw8AAAKAjJtgvjmSFRbvsogPyT+86LF/+17W5/jurc/ChU3ngI+7Mc//HdTnuxC3fu+/Sfzsw//Suw3Lplw7MlZcckKntlfOvuWh8gUMPrVo9nWe/4rqc9oxbs3b9VK685ICc+/qj86VPrRt3aNBLPjMw2SYn7WJs3vWmo/IvHzokDzv1jrzwrO/myc+6OV/73Nq86McfkKsu3f97933hYwfnw399WKqSY+69ZTdvhH3Xy9/4nfzM82/Mx993aP74zHtmejp5zV9ekQc/8o5xhwa95DND301lxViOSVGttXHHsGSq6uAkG279+gk5eN3k/EPqu4u/cGAe8B/vyqrV3/936btXrM6vnfbA/PBP3pb/ec63kyS33rhfDlw7lTUHtJzzW/fMB992D5WVHvjxYx867hAYOumhm/KHH7o8bznrmLznT45MkqxaM503f/yy3HbzfnnpT99/zBFCv/jMLA/b27Z8Mn+fJOtba7ePO55R2fF76Ru+eGoOWDvawUx33bE9//2UzyQT8DPv5W/0VfWiqrqqqjZX1QVV9chxx8TcHvyDm+6WqCTJPU/Ymvs8YHO+ffn3KyuH3mN71hwwOckxLLVTn3JbprYnH3rH4d9r27ZlRT78zsPyoFM25R7Hbh1jdNA/PjMsB9OtxnLMR1U9rqo+WFXXVFWrqqfNut7mOF6xm3e+ahf3Xzrfn1/vkpWq+vkkZyd5dZKHJ/lykvOr6sixBsa8tJbcdtN+WX/Y9nGHAsvG/U6+K1dfsSab7lh5t/bLLjowSXLCg+8aR1jQWz4zsGQOyuB37hfNcf2YWcevJGlJ/m4P77141nOnzjewPk6wf1mSt7TW3pokVfWCJD+ZwQ/l98cZGHvv4+89NDdduzrP/u/XjTsUWDYOO3J7brl+1U7tO9oOP0ryDzP5zMDSaK2dl+S8JKnauSrTWrvbL3RV9dQkn2itXbGHV2+f/ex89SpZqarVSR6R5HU72lpr01X10SSP2cX9a5KsmdFk6Y8e+Pbla3LObx2X//CIO/NjP3fLuMOBZWP1/tPZtnXn/0ls3TJoW7P/9KhDgl7zmWE5mB7DhPfp7/e3blbysaW1tqhVjqrqqAwKCc/Zi9vvX1XXJNmc5LNJzmitfXs+/fVtGNgRSVYmuX5W+/VJjt7F/Wck2TDjuLrT6NijW27YL2c++4QctG4qv/2WK7Ny5Z6fAQa2bl6x0/yvJFm9ZtC2ZXPf/pMN4+UzA3t0de7+u/IZS/DO5yTZmOS9e7jvgiTPTfKkJC9McnyST1fVvIoLvaqsLMDrMpjfssO6SFjG5s7bV+SVv3RC7rh9Zf7P+y7P4Ucrv8N83HLDfjn86G07tR921KDt5uuX+3+yYWn5zLAcTLcVmR7xJo0z+jsug8Rih6XYO+JXkvxVa23z7m4aDi3b4StVdUGSbyX5uSR/vred9e2vHG5KMpXkqFntRyXZabxba21La+32HUfu/g+DEdq6uXLmc07I1Ve
syVnnXpH7PMA+KjBf37z4gBx3wpYcuHbqbu0PfNimJMkVFx8wjrCgt3xmYI82zvxdeQmGgP1wkpOS/Nl8n22t3Zbk60nuN5/nepWstNa2JrkwyWk72qpqxfD8s+OKi92bmkp+7wX3zSUXHpRXvvmqPOiUTeMOCZalT//D+qzcL3nys27+Xtuq1dN54s/fkksuPDA3XrN6jNFB//jMwMj9apILW2tfnu+DVbU2yYlJrp3Pc32sj56d5Nyq+mKSzyd5SQbLqb11nEExtze/+p753EfW59Gnb8jG2/bLx/7u0LtdP+0ZtyZJrr96VT72nsOSJJd/ZbCs5F+/cVBEO/K4rfmxn711hFFD/1z2bwflUx9Yn+edcW3WH7E911y5Jqf/3C056l5bc/bL7zXu8KB3fGZYDqZSmcr89j1Zij7nY5hIzKx4HF9VD01yy44J8cNNLp+Z5OVzvONjSd7XWjtneP6GJB/MYOjXsRlsSzKV5J3zia13yUpr7W+r6h5JzspgUv1FSZ7UWps96Z6e2FFm/9w/rc/n/mn9Ttd3JCvXfXtNzn39MXe7tuP8Bx5zh2QFkrz+xffOc757XU57xq1Zt34qV16yf8589vH52gVrxx0a9JLPDCyJU5J8Ysb5jjnh52YwST5JfiFJZe5k48QMFsva4bjhvYcnuTHJZ5I8urV243wCq9YmZ0fxYca34davn5CD1/VqhBv01o8f+9BxhwDABNvetuWT+fskWT+cY7xP2PF76asv+LHsv3a09YHNd2zP7zzqo8kE/Mz9Rg8AAPRS74aBAQDApJjK/OeQLEWfk0JlBQAA6CXJCgAA0EuGgQEAQEfGvIP9sjc53wkAADBRVFYAAKAjU21FpkZc6Rh1f12anO8EAACYKJIVAACglwwDAwCAjrRUpke8z0obcX9dUlkBAAB6SWUFAAA6YoL94kzOdwIAAEwUlRUAAOjIdKtMt9HOIRl1f11SWQEAAHpJsgIAAPSSYWAAANCRqazI1IjrA6Pur0uT850AAAATRWUFAAA6YoL94qisAAAAvSRZAQAAeskwMAAA6Mh0VmR6xPWBUffXpcn5TgAAgImisgIAAB2ZapWpEU94H3V/XVJZAQAAekllBQAAOmLp4sVRWQEAAHpJsgIAAPSSYWAAANCR1lZkuo22PtBG3F+XJuc7AQAAJorKCgAAdGQqlamMeOniEffXJZUVAACglyQrAABALxkGBgAAHZluo9/3ZLqNtLtOqawAAAC9pLICAAAdmR7D0sWj7q9Lk/OdAAAAE0WyAgAA9JJhYAAA0JHpVKZHvO/JqPvrksoKAADQSyorAADQkalWmRrx0sWj7q9LKisAAEAvqawAAEBHLF28OJPznQAAABNFsgIAAPSSYWAAANCR6VSmRzzh3dLFAAAAHVNZAQCAjrQxbArZVFYAAAC6JVkBAAB6yTAwAADoyHQbwwR7O9gDAAB0S2UFAAA6Ygf7xZmc7wQAAJgoKisAANARc1YWR2UFAADoJckKAADQS4aBAQBAR6bHsIP9qPvrksoKAADQSyorAADQERPsF0dlBQAA6CXJCgAA0EuSFQAA6MiOYWCjPuajqh5XVR+sqmuqqlXV02Zdf9uwfebx4b1474uq6qqq2lxVF1TVI+f305OsAADAvu6gJF9O8qLd3PPhJMfMOP7T7l5YVT+f5Owkr07y8OH7z6+qI+cTmAn2AADQkeUwwb61dl6S85Kkas5nt7TWrpvHa1+W5C2ttbcO3/uCJD+Z5FeS/P7evkRlBQAA2JPHV9UNVXVZVf1xVR0+141VtTrJI5J8dEdba216eP6Y+XSqsgIAAB0Zc2Vl3axKyZbW2pYFvPLDSd6b5MokJyZ5bZLzquoxrbWpXdx/RJKVSa6f1X59kgfOp2PJCgAATKarZ52/Osmr5vuS1trfzDj9alV9Jck3kzw+yccWGtzekKwAAMBkOi7JxhnnC6mq7KS1dkVV3ZTkftl1snJTkqkkR81qPyr
JfOa9mLMCAABdaUmmUyM92ve739hau33GsSTJSlUdl+TwJNfu8ntubWuSC5OcNuOZFcPzz86nL5UVAADYh1XV2gyqJDscX1UPTXLL8PidJH+XQVXkxCSvT/KNJOfPeMfHkryvtXbOsOnsJOdW1ReTfD7JSzJYIvmt84lNsgIAAB1ZDksXJzklySdmnJ89/Hpukhcm+YEkz0lySJJrknwkyW/PqtScmMHE+iRJa+1vq+oeSc5KcnSSi5I8qbU2e9L9bklWAABgH9Za+2SS3WU4P74X77jvLtrOSXLOznfvPXNWAACAXlJZAQCAjiyTYWC9pbICAAD0ksoKAAB0RGVlcVRWAACAXlJZAQCAjqisLI7KCgAA0EuSFQAAoJcMAwMAgI60VmkjHpY16v66pLICAAD0ksoKAAB0ZDqV6Yx4gv2I++uSygoAANBLkhUAAKCXDAMDAICO2GdlcVRWAACAXlJZAQCAjli6eHFUVgAAgF5SWQEAgI6Ys7I4KisAAEAvSVYAAIBeMgwMAAA6YoL94qisAAAAvaSyAgAAHWljmGA/SZWViUxWnv6Ah2S/WjXuMGBZ+KVLrx53CLDs/O1P/NC4Q4DlY3pLctW4g2C5MgwMAADopYmsrAAAQB+0JK2Nvs9JobICAAD0ksoKAAB0ZDqVyoh3sB9xf11SWQEAAHpJZQUAADpiU8jFUVkBAAB6SbICAAD0kmFgAADQkelWqREPy5o2DAwAAKBbKisAANCR1sawKeQE7QqpsgIAAPSSZAUAAOglw8AAAKAj9llZHJUVAACgl1RWAACgIyori6OyAgAA9JJkBQAA6CXDwAAAoCN2sF8clRUAAKCXVFYAAKAjdrBfHJUVAACgl1RWAACgI4PKyqiXLh5pd51SWQEAAHpJsgIAAPSSYWAAANARO9gvjsoKAADQSyorAADQkTY8Rt3npFBZAQAAekmyAgAA9JJhYAAA0BET7BdHZQUAAOgllRUAAOiKGfaLorICAAD0ksoKAAB0ZQxzVmLOCgAAQLckKwAAQC8ZBgYAAB1pbXCMus9JobICAAD0kmQFAAA6smNTyFEf81FVj6uqD1bVNVXVquppM66tqqr/VVVfrao7h/e8vaqO3cM7XzV818zj0vn+/CQrAACwbzsoyZeTvGgX1w5M8vAkrxl+/ZkkJyX5wF689+Ikx8w4Tp1vYOasAADAPqy1dl6S85KkqmZf25Dk9JltVfXrST5fVfdurX17N6/e3lq7bjGxSVYAAKArrUa/78n3+1s3K/nY0lrbsgQ9rE/Skty2h/vuX1XXJNmc5LNJzthDcrMTw8AAAGAyXZ1kw4zjjMW+sKr2T/K/kryztXb7bm69IMlzkzwpyQuTHJ/k01W1bj79qawAAEBHxrx08XFJNs64tKiqSlWtSvKuJJVBArKbGNp5M06/UlUXJPlWkp9L8ud726dkBQAAJtPGPVQ/9tqMROU+SX50vu9trd1WVV9Pcr/5PGcYGAAAdKWN6VhCMxKV+yf5sdbazQt4x9okJya5dj7PSVYAAGAfVlVrq+qhVfXQYdPxw/N7DxOV9yQ5JckvJVlZVUcPj9Uz3vGx4SphO87fUFU/UlX3rarHJnlfkqkk75xPbIaBAQDAvu2UJJ+YcX728Ou5SV6V5KeH5xfNeu4JST45/POJSY6Yce24DBKTw5PcmOQzSR7dWrtxPoFJVgAAoCML2VF+Kfqc3/3tkxlMmp/LHl/YWrvvrPNfmFcQczAMDAAA6CWVFQAA6NKIly6eJCorAABAL0lWAACAXjIMDAAAOrIcJtj3mcoKAADQSyorAADQlQ52lN+rPieEygoAANBLKisAANCZyl7sqdhBn5NBZQUAAOglyQoAANBLhoEBAEBXTLBfFJUVAACgl/aqslJVj1vIy1trn1rIcwAAMBFUVhZlb4eBfTLz+7ZreP/K+QYEAACQ7H2y8oROowAAAJhlr5KV1to/dx0IAABMnFaDY9R9TohFT7CvqmO
q6j9W1UFLERAAAECyiGSlqp5aVZcmuTrJl5I8ath+RFX9W1U9bWlCBACA5am18RyTYkHJSlX9VJL3JrkpyaszmFCfJGmt3ZTku0metxQBAgAA+6aFVlbOTPKp1tqpSd60i+ufTfKwBUcFAACToI3pmBALTVZOTvKu3Vy/PsmRC3w3AADAgpOVTUl2N6H+hCQ3L/DdAAAAC05WPpHkOVW109LHVXV0kv+S5COLCQwAAJa9HUsXj/qYEAtNVv6/JMcl+UKSX8tgZNyPV9XvJvlqBhPuX70kEQIAAPukvd3B/m5aa5dV1alJ/iDJazJITl4xvPzJJC9qrV21FAECAMByVW1wjLrPSbGgZCVJWmsXJ/mxqjo0yf0yqNJc0Vq7camCAwAA9l0LTlZ2aK3dmsFwMAAAgCWzmB3s71FVb6iqf6+qTcPj34dtRy1lkAAAsCzZZ2VRFrqD/YMzmEj/siQbkrx7eGwYtn2lqk5eqiABAIB9z0KHgb0pycokj2qt3W0IWFU9MsmHkvxRkicsLjwAAFjGxrGUsKWL88gkfzA7UUmS1trnM1gl7FGLCQwAANi3LbSyckOSzbu5vnl4DwAA7LvGMYdkX5+zkuSNSV443K3+bqrq2CQvHN4DAACwIHtVWamql+2i+Y4k36iq9yX5xrDt/kmeNjyfnMFyAADAyO3tMLA37ObaL+2i7QeGz/zfeUcEAACTwjCwRdnbZOX4TqMAAACYZa+Sldbat7oOBAAAJo7KyqIseAd7AACALi106eJU1Q8k+Y0kD0+yPjsnPq21duIiYgMAAPZhC6qsVNXjk3w+yVOSXJPkhCRXDP98nwxWCvvUkkQIAADL1Y4d7Ed9TIiFDgM7K4Pk5KQkzxu2vba1dmqSxyY5Lsm7Fh8eAACwr1posvLwJH/eWrs9ydSwbWWStNYuSPKnSV6z+PAAAGD5qjaeY1IsNFnZnmTj8M+3JdmW5MgZ169I8qCFhwUAAOzrFjrB/hsZ7Faf1lqrqkuTPD3JXw2v/2SS6xYfHsvdqtXTefYrrstpz7g1a9dP5cpLDsi5rz86X/rUunGHBmO17c7KJX++Ljd9ZXVu/urqbN2wIo9+7S058Wc27XTvt847IJe8bW1uv2JVakXLIfffngf954255+M3jyFy6I/7P/DWnPbk7+QHHn5zjjp6U27fsDqXXXxo3v6WB+aa76wdd3jAElhoZeVDSf5TVe1Ids5O8jNVdXlVXZ7kpzMYCsY+7uVv/E5+5vk35uPvOzR/fOY9Mz2dvOYvr8iDH3nHuEODsdpy64p89f8dnA1X7JdDT9o6532X/eVB+cxLD8+aQ6bz0JdvyEP+68Zsu6PyyRcckW9/ZP8RRgz987PP+kZ+6EeuzZe/eET+9A9Ozoc/cJ+c/NCb84d/8c+5z/G3jzs8GGhjOibEQisrr0nyBxnOV2mtnVtVU0meMWz7vdba25YkQpatkx66KU942m15y1nH5D1/Mhgl+NH3HJo3f/yy/OdXXpuX/vT9xxwhjM8BR07lZz59TQ64x3Ru/uqqfPiZu048LnvH2hz+kK15/J/cnBou7nLiM+7Mex93TK58/0G59xNVV9h3vf9vTsz/ftUjsn379//u9dMfOzZvevsn88xfvjxvOOsRY4wOWAoLqqy01ra11m5urbUZbe9orT29tfazC01UqupxVfXBqrqmqlpVPW0h76EfTn3KbZnannzoHYd/r23blhX58DsPy4NO2ZR7HDv33ybDpFu5OjngHtN7vG/bnSuy5rCp7yUqSbJqbct+B7Ws3H+C/uoMFuCSrx12t0QlSa65em2+feW63Os+KvgwCfq2g/1BSb6c5EXjDoTFu9/Jd+XqK9Zk0x0r79Z+2UUHJklOePBd4wgLlpWjHrkl135m/1z2lwfljqtXZsMV++XzZx2SbRsrJ/2yX8ZgZy2HHLYlGzasHncgwBLYq2FgVfXxBby7tdZOm+cD5yU5b9jnArqkTw47cntuuX7VTu0
72g4/avuoQ4Jl55T/77ZsuXVFvvh7h+aLvzdoW3PoVE576025x8NUJ2G2Jzzx6hxx5Oa8488eOO5QIElSGf1SwpP0W/TezllZkflP1ZmknxMLsHr/6WzbuvO/Blu3DNrW7L/nITCwr1u5f8u647fnwKPuzD0fvznb7qxceu7afOq/HZ4nvuOGrLvP1J5fAvuI4+69MS98+VdzyVcPzcfOu9e4wwGWwF4lK621x3ccx4JU1Zoka2Y0WQ+3R7ZuXpFVq3fOcVevGbRt2dy3UYjQP59+yeFZsbLl8X9y8/fajjvtrnzgx4/ORW9cnx/+v7eMMTroj0MP25xXveGC3HnHqrz2lT+Y6Wl/ZwqTYLn/tnhGkg0zjqvHGw4z3XLDfjnsqG07te9ou/n6hS5GB/uGjd9ZmWs/vX+O+9G7r/i15pCWIx+xNTd+yZh8SJIDD9qWV/+fz+Wgtdty5ssfnVtusqw3PdJqPMeEWO7JyuuSrJ9xHDfecJjpmxcfkONO2JID1959mMoDHzbY9O6Kiw8YR1iwbGy+abA4xfQuRkxOb0/a1OT8zwgWatXqqfzO6y/IPe91Z179ikflO1cZZAGTZFknK621La2123ccSTaOOya+79P/sD4r90ue/KzvD19ZtXo6T/z5W3LJhQfmxmv8rTDszrr7bE+taPn2hw5ImzGictN1K3PDF9fk0P+wc+US9iUrVrT85lkX5oEn35rX/fYpufTiw8YdEuzMppCL0qtxOFW1Nsn9ZjQdX1UPTXJLa+3b44mKhbrs3w7Kpz6wPs8749qsP2J7rrlyTU7/uVty1L225uyXm/gIl73joGzduCJ33TCooHz3E/tn0/WDP5/0rDuy/2HTOeEZd+ab716bjz33iNzr9Luy7c4VufydB2VqS+XBz7dDN/u2X/2Ni/PoH74uF3zmqKxbtzVPeOJ37nb9Ex/x/xpY7nqVrCQ5JcknZpyfPfx6bpLnjjwaFu31L753nvPd63LaM27NuvVTufKS/XPms4/P1y5YO+7QYOwu+Yt1ufOa7/9n+Dv/dGC+80+DPx//U5uyet1UHvk7t+XQk7blm393UC46e32S5PCHbM1jfv/WHPWDli5m33bC/TYkSR516vV51KnX73RdsgLLX6+SldbaJ2PJ44mybcuK/Nlrjs2fvebYcYcCvfO0j1+3x3tW7Jec9Kw7c9Kz7hxBRLC8nPEbPzTuEGDPxjEsyzCwgaq6Z5LHJTkyyd+11q6uqpUZTHbf0FqzAQAAALAgC0pWarC9/P9J8uvDd7QkX81g6eC1Sa5KcmaSNy5FkAAAsBxVG8MO9hNUWVnoamCvSPLiJG9IcnpmDN1qrW1I8t4kz1h0dAAAwD5rocPA/kuSt7fWfquqDt/F9a8k+YmFhwUAABPAnJVFWWhl5V5J/nU31+9McvAC3w0AAIxIVT2uqj5YVddUVauqp826XlV1VlVdW1V3VdVHq+r+e/HeF1XVVVW1uaouqKpHzje2hSYrN2SQsMzlEUnsiwIAAP13UJIvJ3nRHNf/R5L/luQFSR6VQWHi/Kraf64XVtXPZ7ANyauTPHz4/vOr6sj5BLbQZOW9SV5QVSfMaGvDwJ6YwZ4o717guwEAYDIsgx3sW2vntdZe2Vp73+xrw4W1XpLkd1trf99a+0qSZyc5NsnTdvPalyV5S2vtra21f88g0dmU5FfmE9tCk5XfSXJtkouSvD2DH8n/rKrPJDkvgzkrr13guwEAgMVbV1UHzzjWLOAdxyc5OslHdzQMF9S6IMljdvVAVa3OYKTVzGemh+e7fGYuC0pWhgE+Osnrk9wzyeYkP5LkkAxKPT/cWtu0kHcDAMCk2LF08aiPoauTbJhxnLGAb+Ho4dfrZ7VfP+PabEckWTnPZ3ZpwZtCttbuSvK7wwMAAOiX45JsnHG+ZVyBLNRCh4EBAAD9trG1dvuMYyHJynXDr0fNaj9qxrXZbkoyNc9ndmmhO9j/xV7c1lprv7qQ9wMAwER
oNThG3efSuTKDBOO0DOarp6oOzmBVsD/eZfetba2qC4fPvH/4zIrh+Tnz6Xyhw8B+NDuvM7AyyTHDrzdmsKQZAADQY1W1Nsn9ZjQdX1UPTXJLa+3bVfXGJK+sqsszSF5ek+SaDBOR4Ts+luR9rbUdycjZSc6tqi8m+XwGK4odlOSt84ltQclKa+2+u2qvqlVJfm0YzOkLeTcAAEyM5bGD/SlJPjHj/Ozh13Mz2JLk9RkkGm/OYEGtzyR5Umtt84xnTsxgYv0ghNb+tqrukeSsDCbVXzR8Zvak+91a8AT7XWmtbUtyTlU9KIMSz08u5fsBAICl1Vr7ZJI5x4611lqSM4fHXPfcdxdt52Sew75mW9JkZYYvJ/nljt4NAADLwqylhEfW56ToajWw0zPYoRIAAGBBFroa2FwloEOSPC7Jw5P8/gJjAgAAWPAwsFfN0X5rkm8meUGStyzw3QAAMBmWxwT73lroamA2kwQAADo176Sjqg6oqrOr6qe6CAgAACZG+/4k+1Edk1RZmXey0lq7K4O9VI5a+nAAAAAGFjqc68IkJy9lIAAAADMtdIL9S5J8qKq+luRtrbXtSxcSAABMCBPsF2Wvk5WqelySS1prNyY5N8l0kj9N8odV9d0kd816pLXW/uOSRQoAAOxT5lNZ+USSZyV5Z5Kbk9yU5LIuggIAgImgsrIo80lWaniktfb4TqIBAAAYWuicFQAAYA++t5zwiPucFPNdDWyCvnUAAKDP5pusvKOqpvbysEIYAACwYPMdBvbRJF/vIhAAAICZ5pusnNta++tOIgEAAJjBBHsAAOiKpYsXZb5zVgAAAEZCsgIAAPTSXg8Da61JbAAAYB7ss7I4EhAAAKCXTLAHAIAuTVClY9RUVgAAgF5SWQEAgK5YunhRVFYAAIBekqwAAAC9ZBgYAAB0xNLFi6OyAgAA9JLKCgAAdMUE+0VRWQEAAHpJsgIAAPSSYWAAANARE+wXR2UFAADoJZUVAADoign2i6KyAgAA9JLKCgAAdEVlZVFUVgAAgF6SrAAAAL1kGBgAAHTE0sWLo7ICAAD0ksoKAAB0xQT7RVFZAQAAekmyAgAA9JJhYAAA0BXDwBZFZQUAAOgllRUAAOiIpYsXR2UFAADoJckKAADQS4aBAQBAV0ywXxSVFQAAoJdUVgAAoCMm2C+OygoAANBLKisAANAVc1YWRWUFAADoJckKAADQS4aBAQBAVwwDWxSVFQAAoJdUVgAAoCM1PEbd56RQWQEAAHpJsgIAAPSSYWAAANAVE+wXRWUFAAD2YVV1VVW1XRxvmuP+5+7i3s1dxKayAgAAHak2OEbd5zz9YJKVM85PTvJPSd69m2duT3LSjPNOvkvJCgAA7MNaazfOPK+q30zyzST/vPvH2nWdBhbDwAAAoDttTMcCVdXqJM9K8hettd29aW1VfauqvlNVf19VD154r3OTrAAAwGRaV1UHzzjW7MUzT0tySJK37eaey5L8SpKnZpDYrEjyr1V13OLC3ZlkBQAAJtPVSTbMOM7Yi2d+Ncl5rbVr5rqhtfbZ1trbW2sXtdb+OcnPJLkxya8tQcx3Y84KAAB0aXxLCR+XZOOM8y27u7mq7pPkxzJIPvZaa21bVf1bkvvNO8I9UFkBAIDJtLG1dvuMY7fJSpLnJbkhyT/Op5OqWpnkIUmuXWCcc1JZAQCAjiyTpYtTVSsySFbOba1tn3Xt7Um+21o7Y3h+ZpLPJflGBvNbXpHkPkn+bDFx74pkBQAA+LEk907yF7u4du8k0zPOD03yliRHJ7k1yYVJHtta+/elDkqyAgAA+7jW2keS1BzXHj/r/KVJXjqCsCQrAADQmUXue7LgPieECfYAAEAvqawAAEBHlssE+75SWQEAAHpJZQUAALpizsqiqKwAAAC9JFkBAAB6yTAwAADoiAn2iyNZgX3cXz3wuHGHAMvOt9998LhDgGVjatPm5NnjjoLlSrICAABdMcF+UcxZAQAAekmyAgA
A9JJhYAAA0BXDwBZFZQUAAOgllRUAAOiIpYsXR2UFAADoJZUVAADoijkri6KyAgAA9JJkBQAA6CXDwAAAoCPVWqqNdlzWqPvrksoKAADQSyorAADQFRPsF0VlBQAA6CXJCgAA0EuGgQEAQEfsYL84KisAAEAvqawAAEBXTLBfFJUVAACgl1RWAACgI+asLI7KCgAA0EuSFQAAoJcMAwMAgK6YYL8oKisAAEAvqawAAEBHTLBfHJUVAACglyQrAABALxkGBgAAXTHBflFUVgAAgF5SWQEAgA5N0oT3UVNZAQAAekllBQAAutLa4Bh1nxNCZQUAAOglyQoAANBLhoEBAEBH7GC/OCorAABAL6msAABAV2wKuSgqKwAAQC9JVgAAgF4yDAwAADpS04Nj1H1OCpUVAACgl1RWAACgKybYL4rKCgAA0EuSFQAAoJcMAwMAgI7YwX5xVFYAAIBeUlkBAICutDY4Rt3nhFBZAQAAekllBQAAOmLOyuKorAAAAL0kWQEAAHrJMDAAAOiKHewXRWUFAADoJZUVAADoiAn2i6OyAgAA+7CqelVVtVnHpXt45plVdWlVba6qr1bVk7uITbICAABcnOSYGcepc91YVY9N8s4kf57kYUnen+T9VXXyUgdlGBgAAHRl+exgv721dt1e3vviJB9urf3v4flvV9XpSX49yQsW0vlcVFYAAGAyrauqg2cca3Zz7/2r6pqquqKq/qqq7r2bex+T5KOz2s4fti8pyQoAAHRkxwT7UR9DVyfZMOM4Y44wL0jy3CRPSvLCJMcn+XRVrZvj/qOTXD+r7fph+5IyDAwAACbTcUk2zjjfsqubWmvnzTj9SlVdkORbSX4ug3kpYyNZAQCArox3U8iNrbXb5/14a7dV1deT3G+OW65LctSstqOG7UvKMDAAAOB7qmptkhOTXDvHLZ9NctqsttOH7UtKsgIAAPuwqnpDVf1IVd13uCzx+5JMZbA8carq7VX1uhmP/EGSJ1XVy6vqgVX1qiSnJDlnqWMzDAwAADqyTHawPy6DxOTwJDcm+UySR7fWbhxev3eS6R03t9b+tap+McnvJnltksuTPK219rXFRb4zyQoAAOzDWmu/sIfrj99F27uTvLurmHaQrAAAQFem2+AYdZ8TwpwVAACglyQrAABALxkGBgAAXRnvPivLnsoKAADQSyorAADQkcoYli4ebXedUlkBAAB6SWUFAAC60trgGHWfE0JlBQAA6CXJCgAA0EuGgQEAQEeqjWGC/eSMAlNZAQAA+kllBQAAumJTyEVRWQEAAHpJsgIAAPSSYWAAANCRai014n1PRt1fl1RWAACAXlJZAQCArkwPj1H3OSFUVgAAgF5SWQEAgI6Ys7I4KisAAEAvSVYAAIBeMgwMAAC6Ygf7RVFZAQAAekllBQAAutLa4Bh1nxNCZQUAAOglyQoAANBLhoEBAEBHqg2OUfc5KVRWAACAXlJZoVOrVk/n2a+4Lqc949asXT+VKy85IOe+/uh86VPrxh0a9JLPDMyt7prKwR+4Kasv35TV37grK++cys3/9bjc+YRD73bfQR+9JQd96rasumZLVtw5lalD98vmB6/NhmcemakjV48pevZZJtgvisoKnXr5G7+Tn3n+jfn4+w7NH595z0xPJ6/5yyvy4EfeMe7QoJd8ZmBuKzZOZf17bsiq727JtvvuP+d9q6+8K9uPXJXbn3pEbvkvx+bOxx2SA/5tY47+zW9k5S3bRhgxsFgqK3TmpIduyhOedlvectYxec+fHJkk+eh7Ds2bP35Z/vMrr81Lf/r+Y44Q+sVnBnZv6tD9cvWbH5jpQ1dl9Tc35ejf/OYu77v1v9xzp7ZNP7g+x/zmN3LQP9+a259+ZNehwvfU9OAYdZ+TQmWFzpz6lNsytT350DsO/17bti0r8uF3HpYHnbIp9zh26xijg/7xmYE9WLUi04euWtCjU0cOnqs7J+i3ONgH9CpZqaozquoLVbWxqm6oqvd
X1UnjjouFud/Jd+XqK9Zk0x0r79Z+2UUHJklOePBd4wgLestnBpbWio3bs2LD9qz+5qYc9qarkyRbHnLQmKMC5qNvw8B+JMmbknwhg9hem+QjVfWg1tqdY42MeTvsyO255fqd/wZsR9vhR20fdUjQaz4zsLTu+WuXprYNJhpPrVuZW37lmGz+jxarYMRMsF+UXiUrrbUnzTyvqucmuSHJI5J8ahwxsXCr95/Otq21U/vWLYO2NfsrxcNMPjOwtG74rfumtrWsunpzDvr0bVmx2WcIlpteJSu7sH749ZZdXayqNUnWzGjy1yU9snXziqxavXNmv3rNoG3L5l6NQoSx85mBpbXl5LVJks0PW5e7fvDgHP2yyzO9/4rc8RNHjDky9ilteIy6zwnR2//zVdWKJG9M8i+tta/NcdsZSTbMOK4eTXTsjVtu2C+HHbXzEpE72m6+vu+5MoyWzwx0Z/vRa7Lt+ANy0KdvG3cowDz0NlnJYO7KyUl+YTf3vC6D6suO47gRxMVe+ubFB+S4E7bkwLVTd2t/4MM2JUmuuPiAcYQFveUzA92qrdNZsclQMFhOepmsVNU5SZ6S5AmttTmrJa21La2123ccSTaOLEj26NP/sD4r90ue/Kybv9e2avV0nvjzt+SSCw/MjdfYRRhm8pmBJTDVUndM7dS8+vJNWfXtzdl6oqSf0arWxnJMil6NKaiqSvJHSZ6e5PGttSvHHBKLcNm/HZRPfWB9nnfGtVl/xPZcc+WanP5zt+Soe23N2S+/17jDg97xmYE9W3veTVmxafp7O9EfcOHt3/vzxicN9ii65wsuzabHrs+2e61J239FVn1rcw765K2ZPnBlNvysDSFhOelVspLB0K9fTPLUJBur6uhh+4bWmg0GlqHXv/jeec53r8tpz7g169ZP5cpL9s+Zzz4+X7tg7bhDg17ymYHdO/iDN2W/G78/t+vAC27PgRfcniS584cPydSh++WO0w7N/hffmQM/tyG1tWXqsP2y6YcOyYZnHJmpI1UoGTFLFy9KtR59M1U1VzDPa629bS+ePzjJhsfnqdmvFrbDLQDsybff/ZBxhwDLxtSmzfnms1+XJOuHw/b3CTt+L33CI87IfvvtP9K+t2/fnE9cOBk/815VVlprO28wAAAAy1VLMup1HfpTi1i0Xk6wBwAAkKwAAAC91KthYAAAMEnGsZTwJC1drLICAAD0ksoKAAB0pWUMSxePtrsuqawAAAC9JFkBAAB6yTAwAADoih3sF0VlBQAA6CWVFQAA6Mp0khpDnxNCZQUAAOglyQoAANBLhoEBAEBH7GC/OCorAABAL6msAABAVyxdvCgqKwAAQC+prAAAQFdUVhZFZQUAAOglyQoAAOzDquqMqvpCVW2sqhuq6v1VddIennluVbVZx+aljs0wMAAA6MryGAb2I0nelOQLGeQHr03ykap6UGvtzt08d3uSmUnNkn+jkhUAANiHtdaeNPO8qp6b5IYkj0jyqd0/2q7rMDTJCgAAdGY6SY2hz4F1VXfrfEtrbctevGH98Oste7hvbVV9K4OpJV9K8luttYvnEekembMCAACT6eokG2YcZ+zpgapakeSNSf6ltfa13dx6WZJfSfLUJM/KIK/416o6bpEx343KCgAATKbjkmyccb43VZU3JTk5yam7u6m19tkkn91xXlX/muSSJL+W5LfnHekcJCsAANCRai014gn2M/rb2Fq7fa+fqzonyVOSPK61dvV8+mytbauqf0tyv/k8tyeGgQEAwD6sBs5J8vQkP9pau3IB71iZ5CFJrl3K2FRWAACgK8tj6eI3JfnFDOafbKyqo4ftG1prdyVJVb09yXdba2cMz89M8rkk30hySJJXJLlPkj9bbPgzSVYAAGDf9sLh10/Oan9ekrcN/3zvzFxnLDk0yVuSHJ3k1iQXJnlsa+3flzIwyQoAAHRluiU14srK9Pz6a63tcXHl1trjZ52/NMlL59XRApizAgAA9JJkBQA
A6CXDwAAAoCvLY4J9b6msAAAAvaSyAgAAnRlDZSUqKwAAAJ2SrAAAAL1kGBgAAHTFBPtFUVkBAAB6SWUFAAC6Mt0y8gnv89zBvs9UVgAAgF5SWQEAgK606cEx6j4nhMoKAADQS5IVAACglwwDAwCArli6eFFUVgAAgF5SWQEAgK5YunhRVFYAAIBekqwAAAC9ZBgYAAB0xQT7RVFZAQAAekllBQAAutIyhsrKaLvrksoKAADQSyorAADQFXNWFkVlBQAA6CXJCgAA0EuGgQEAQFemp5NMj6HPyaCyAgAA9JLKCgAAdMUE+0VRWQEAAHpJsgIAAPSSYWAAANAVw8AWRWUFAADoJZUVAADoynRLMuJKx7TKCgAAQKdUVgAAoCOtTae10W7SOOr+uqSyAgAA9JJkBQAA6CXDwAAAoCutjX7Cu6WLAQAAuqWyAgAAXWljWLpYZQUAAKBbkhUAAKCXDAMDAICuTE8nNeJ9T+yzAgAA0C2VFQAA6IoJ9ouisgIAAPSSygoAAHSkTU+njXjOSjNnBQAAoFuSFQAAoJcMAwMAgK6YYL8oKisAAEAvqawAAEBXpltSKisLpbICAAD0kmQFAADoJcPAAACgK60lGfG+J4aBAQAAdEtlBQAAOtKmW9qIJ9g3lRUAAIBuSVYAAIBeMgwMAAC60qYz+gn2I+6vQyorAABAL0lWAACgI226jeVYiKp6UVVdVVWbq+qCqnrkHu5/ZlVdOrz/q1X15AV1vBuSFQAA2MdV1c8nOTvJq5M8PMmXk5xfVUfOcf9jk7wzyZ8neViS9yd5f1WdvJRxSVYAAKArbXo8x/y9LMlbWmtvba39e5IXJNmU5FfmuP/FST7cWvvfrbVLWmu/neRLSX59IZ3PRbICAAD7sKpaneQRST66o621Nj08f8wcjz1m5v1D5+/m/gWZyNXAtmdbMjl74QDQM1ObNo87BFg2pu/aMu4Qxmocv5duz7Ydf1xXVTMvbWmt7eofyBFJVia5flb79UkeOEc3R89x/9HzCnYPJi1ZWZckn8mHxh0HAJPs2X8/7ghgOVqX5PZxBzFCW5Nc95l8aEl/eZ+HO5JcPavt1UleNfpQFm7SkpVrkhyXZOO4A2En6zL4wPjnA3vHZwbmx2em39Zl8HvaPqO1trmqjk+yetyxzDBXmeumJFNJjprVflSS6+Z45rp53r8gE5WstNZaku+OOw52NqMEubG1ti/9rQosiM8MzI/PTO/tk/9MWmubk/R+3GhrbWtVXZjktAxW9UpVrRienzPHY58dXn/jjLbTh+1LZqKSFQAAYEHOTnJuVX0xyeeTvCTJQUnemiRV9fYk322tnTG8/w+S/HNVvTzJPyb5hSSnJHn+UgYlWQEAgH1ca+1vq+oeSc7KYJL8RUme1FrbMYn+3kmmZ9z/r1X1i0l+N8lrk1ye5Gmtta8tZVw1GDkF3aqqNUnOSPK6OVahAGbwmYH58ZmBySRZAQAAesmmkAAAQC9JVgAAgF6SrAAAAL0kWQEAAHpJskKnqmpNVf2vqrqmqu6qqguq6vRxxwV9VVVrq+rVVfXhqrqlqlpVPXfccUEfVdUPVtU5VXVxVd1ZVd+uqndV1QPGHRuwNCQrdO1tSV6W5K+SvDjJVJIPVdWp4wwKeuyIJGcm+Q9JvjzmWKDv/meSZyT5WAb/j3lzkscl+VJVnTzOwIClYeliOlNVj0xyQZJXtNbeMGzbP8nXktzQWnvsOOODPhruFXFoa+26qjolyReSPK+19rbxRgb9U1WPTfLF1trWGW33T/LVJO9prT1rbMEBS0JlhS79bAaVlDfvaGitbU7y50keU1X3Gldg0FettS2ttevGHQcsB621f52ZqAzbLk9ycQbVSWCZk6zQpYcl+Xpr7fZZ7Z8ffn3oaMMBYNJVVSU5KslN444FWDzJCl06Jsm1u2jf0XbsCGMBYN/wS0numeRvxx0IsHiSFbp0QJItu2jfPOM6ACyJqnpgkjcl+WySc8c
cDrAEJCt06a4ka3bRvv+M6wCwaFV1dJJ/TLIhyc+21qbGHBKwBPYbdwBMtGszKMXPdszw6zUjjAWACVVV65Ocl+SQJD/cWvP/F5gQKit06aIkD6iqg2e1P2rGdQBYsOGS+B9M8oAkT2mt/fuYQwKWkGSFLr0nycokz9/RMNxD4nlJLmitfWdcgQGw/FXVygwm0j8myTNba58dc0jAEjMMjM601i6oqncneV1VHZnkG0mek+S+SX51nLFBn1XVr2cwnGXHink/VVXHDf/8R621DWMJDPrn/yT56QwqK4dV1d02gWytvWMsUQFLxg72dGpYnn9NkmclOTTJV5L8dmvt/LEGBj1WVVcluc8cl49vrV01umigv6rqk0l+ZK7rrbUaXTRAFyQrAABAL5mzAgAA9JJkBQAA6CXJCgAA0EuSFQAAoJckKwAAQC9JVgAAgF6SrAAAAL0kWQEAAHpJsgKwl6rqqqp624zzx1dVq6rHjy2oWWbHuJv7WlW9agHvf+7w2VMWEt8c73xVVdmhGICdSFaAZWHGL8k7js1V9fWqOqeqjhp3fPNRVU9eSKIAAPua/cYdAMA8nZnkyiT7Jzk1yQuTPLmqTm6tbRpxLJ9KckCSrfN87slJXpTkVUsdEABMEskKsNyc11r74vDPf1ZVNyd5WZKnJnnnrh6oqoNaa3cudSCttekkm5f6vQDAgGFgwHL38eHX45Okqt5WVXdU1YlV9aGq2pjkr4bXVlTVS6rq4uEwsuur6k+r6tCZL6yBV1bV1VW1qao+UVUPnt3xXHNWqupRw75vrao7q+orVfXiHfFlUFXZMW+kzZyvsdQx7q2quk9V/b+quqyq7qqqm6vq3VV13zkeOXAY181VdXtVvX12jMP3/kRVfXr4c9hYVf+4mDgB2LeorADL3YnDrzfPaNsvyflJPpPkvyfZMTzsT5M8N8lbk/xhBgnOryd5WFX9UGtt2/C+s5K8MsmHhsfDk3wkyeo9BVNVpyf5hyTXJvmDJNcl+Q9JnjI8/9MkxyY5Pckv7+IVncc4hx9M8tgkf5Pk6iT3zWCI3Ser6kG7GGJ3TpLbMhjKdtLw3vtU1eNba234s/jlJOdm8M/ifyY5cHjfZ6rqYa21qxYYKwD7CMkKsNysr6ojMpiz8kMZzGG5K4MEYYc1Sd7dWjtjR0NVnZrkPyf5pdbaX89o/0SSDyd5ZpK/rqp7JPkfSf4xyU/N+MX795L81u4Cq6qVGSQb1yZ5aGvtthnXKklaa5+tqq8nOb219o5Zz3ce4278Y2vtPbPi+WCSzyZ5RpK/nHX/1iSn7UiequpbSV6f5KeSfKCq1maQbP1Za+35M955bpLLhnE+PwCwG4aBAcvNR5PcmOQ7GVQB7kjy9Nbad2fd98ezzp+ZZEOSf6qqI3YcSS4cvuMJw/t+LIPqxB/tSAKG3rgXsT0sg0rIG2cmKkky611zGUWMu9Rau2vHn6tqVVUdnuQbGVRPHr6LR948o8qTDH7e2zNYPCAZVI4OSfLOWd/LVJILZnwvADAnlRVguXlRkq9n8Ivx9UkuG050n2l7BkOZZrp/kvVJbpjjvUcOv95n+PXymRdbazdW1a17iG3HkLSv7eG+uYwixl2qqgOSnJHkeUnumaRmXF6/i0dm931HVV2bwfCxZPC9JN+fUzTb7QuJE4B9i2QFWG4+P2M1sLls2UUCsyKDJOCX5njmxkVHtnjjjPGPMkhU3pjB0K8NSVoG1auFVOF3PPPLGczbmW37At4JwD5GsgLsK76ZwfCpf5k55GkXvjX8ev8kV+xoHM4T2Wm1q130kSQnZzBcbS5zDQkbRYxz+dkk57bWXj7jfftnMJRrV+6f5BMz7l2b5JgMJvsn3/9Z3NBa293PAgDmZM4KsK94V5KVSX579oWq2q+qDhmefjTJtiS/sWNS/NBL9qKPL2WwYeVLZrxvRx8z33XnsO1u94woxrlM5e5Dv5LkN4bx7Mrzq2rVjPMXZvAXYOcNz8/
PYKjXb826L8n3EisA2C2VFWCf0Fr756r60yRnVNVDM1jmd1sGFYJnJnlxkvcM5328IYP5G/9QVR/KYOL8TyS5aQ99TFfVC5N8MMlFVfXWDFYGe2CSByf58eGtFw6//mFVnZ9kqrX2N6OIcTf+IckvV9WGJP+e5DEZVHlunuP+1Uk+VlXvymDp4v+awVLRHxj+LG4f/iz+MsmXqupvMhjGdu8kP5nkXzJYkhkA5iRZAfYZrbUXVNWFSX4tyWszmDdxVZJ3ZPDL8w6vzGBn+hdksGrVBUmemMFSwXvq4/yqekKS30ny8gwq2N9M8pYZt703gzkiv5DkWRlUNP5mVDHO4cUZVFd+KYNlof8lg2Tl/Dnu//XhvWclWZXknUn+28zVyVprf11V1yT5zSSvyGBJ6e8m+XQG+8gAwG7V3q2mCQAAMFrmrAAAAL0kWQEAAHpJsgIAAPSSZAUAAOglyQoAANBLkhUAAKCXJCsAAEAvSVYAAIBekqwAAAC9JFkBAAB6SbICAAD0kmQFAADoJckKAADQS/8/OA47EBRXanAAAAAASUVORK5CYII=", + "image/png": "iVBORw0KGgoAAAANSUhEUgAAAysAAAKvCAYAAABqAVAmAAAAOXRFWHRTb2Z0d2FyZQBNYXRwbG90bGliIHZlcnNpb24zLjcuNCwgaHR0cHM6Ly9tYXRwbG90bGliLm9yZy8WgzjOAAAACXBIWXMAAA9hAAAPYQGoP6dpAABBiklEQVR4nO3deZilZ1kn/u/dne7O0p3ORjbCkgQIA8FhiWxGBGMQEQVE1FFk0RmEQYdtmDH+MEJQcBgmgxpGBRWCKAoICEoIsgkoBAiGJSYhkAQI2bdOJ53eqp7fH+c0VKq7uruW95y3Tn8+1/Ve1e/zLs9dlZyk7r6fpVprAQAA6JsV4w4AAABgVyQrAABAL0lWAACAXpKsAAAAvSRZAQAAekmyAgAA9JJkBQAA6CXJCgAA0Ev7jTuApVRVleTYJBvHHQsAAN+zLsk1bR/bjbyq9k+yekzdb22tbR5T30tmopKVDBKVq8cdBAAAOzkuyXfHHcSoVNX+Rx+58q7rbpgaVwjXVdXxyz1hmbRkZWOSfOtL983Ba41wg73x9Ac8ZNwhADDBtmdbPpMPJfveyJfV190wlW9deN8cvG60v5fevnE693nEVUdnUNWRrPTNwWtXjPxfCliu9qtV4w4BgEm2Tw382tnadZW162qkfU5ntP11yW/0AABAL0lWAACAXprIYWAAANAHU206UyMeCjfVpkfbYYdUVgAAgF5SWQEAgI5Mp2V6xKsMjLq/LqmsAAAAvaSyAgAAHZnOdEY9g2T0PXZHZQUAAOglyQoAANBLhoEBAEBHplrLVBvthPdR99cllRUAAKCXVFYAAKAjli5eHJUVAACglyQrAABALxkGBgAAHZlOy5RhYAumsgIAAPSSygoAAHTEBPvFUVkBAAB6SWUFAAA6YlPIxVFZAQAAekmyAgAA9JJhYAAA0JHp4THqPieFygoAAOzDquqMqvpCVW2sqhuq6v1VddKse/avqjdV1c1VdUdV/V1VHbWH91ZVnVVV11bVXVX10aq6/3xik6wAAEBHpoabQo76mKcfSfKmJI9OcnqSVUk+UlUHzbjn/yb5qSTPHN5/bJL37uG9/yPJf0vygiSPSnJnkvOrav+9DcwwMAAA2Ie11p4087yqnpvkhiSPSPKpqlqf5FeT/GJr7ePDe56X5JKqenRr7XOz31lVleQlSX63tfb3w7ZnJ7k+ydOS/M3exKayAgAAk2ldVR0841izl8+tH369Zfj1ERlUWz6644bW2qVJvp3kMXO84/gkR896ZkOSC3bzzE5UVgAAoCNTbXCMus+hq2ddenWSV+3u2apakeSNSf6ltfa1YfPRSba21m6bdfv1w2u7cvSMe/b2mZ1IVgAAYDIdl2TjjPMte/HMm5KcnOTUTiKaJ8kKAAB0ZMxLF29srd2+
t89V1TlJnpLkca21mVWZ65KsrqpDZlVXjhpe25XrZtxz7axnLtrbmMxZAQCAfdhwieFzkjw9yY+21q6cdcuFSbYlOW3GMycluXeSz87x2iszSFhmPnNwBquCzfXMTlRWAACgI9OpTKVG3uc8vSnJLyZ5apKNVbVjTsmG1tpdrbUNVfXnSc6uqluS3J7kj5J8duZKYFV1aZIzWmvva621qnpjkldW1eUZJC+vSXJNkvfvbWCSFQAA2Le9cPj1k7Pan5fkbcM/vzSDEWZ/l2RNkvOT/NdZ95+U768kliSvT3JQkjcnOSTJZ5I8qbW2eW8Dk6wAAMA+rLW2x1LMMMF40fDYq/e01lqSM4fHgkhWAACgI9NtcIy6z0lhgj0AANBLKisAANCRqTFMsB91f11SWQEAAHpJsgIAAPSSYWAAANARw8AWR2UFAADoJZUVAADoyHSrTO95G5Ml73NSqKwAAAC9pLICAAAdMWdlcVRWAACAXpKsAAAAvWQYGAAAdGQqKzI14vrA1Eh765bKCgAA0EsqKwAA0JE2hqWLm6WLAQAAuiVZAQAAeskwMAAA6Ih9VhZHZQUAAOgllRUAAOjIVFuRqTbipYvbSLvrlMoKAADQSyorAADQkelUpkdcH5jO5JRWVFYAAIBekqwAAAC9ZBgYAAB0xNLFi6OyAgAA9JLKCgAAdGQ8SxebYA8AANApyQoAANBLhoEBAEBHBvusjHbC+6j765LKCgAA0EsqKwAA0JHprMiUHewXTGUFAADoJckKAADQS4aBAQBAR+yzsjgqKwAAQC+prAAAQEemsyLTJtgvmMoKAADQSyorAADQkalWmWqj3aRx1P11SWUFAADoJckKAADQS4aBAQBAR6bGsIP9lAn2AAAA3VJZAQCAjky3FZke8aaQ0zaFBAAA6JZkBQAA6CXDwAAAoCMm2C+OygoAANBLKisAANCR6Yx+R/npkfbWLZUVAACgl1RWAACgI9NZkekR1wdG3V+XJuc7AQAAJopkBQAA6CXDwAAAoCNTbUWmRryD/aj769LkfCcAAMBEUVkBAICOTKcynVEvXTza/rqksgIAAPSSZAUAAOglw8AAAKAjJtgvjmSFRbvsogPyT+86LF/+17W5/jurc/ChU3ngI+7Mc//HdTnuxC3fu+/Sfzsw//Suw3Lplw7MlZcckKntlfOvuWh8gUMPrVo9nWe/4rqc9oxbs3b9VK685ICc+/qj86VPrRt3aNBLPjMw2SYn7WJs3vWmo/IvHzokDzv1jrzwrO/myc+6OV/73Nq86McfkKsu3f97933hYwfnw399WKqSY+69ZTdvhH3Xy9/4nfzM82/Mx993aP74zHtmejp5zV9ekQc/8o5xhwa95DND301lxViOSVGttXHHsGSq6uAkG279+gk5eN3k/EPqu4u/cGAe8B/vyqrV3/936btXrM6vnfbA/PBP3pb/ec63kyS33rhfDlw7lTUHtJzzW/fMB992D5WVHvjxYx867hAYOumhm/KHH7o8bznrmLznT45MkqxaM503f/yy3HbzfnnpT99/zBFCv/jMLA/b27Z8Mn+fJOtba7ePO55R2fF76Ru+eGoOWDvawUx33bE9//2UzyQT8DPv5W/0VfWiqrqqqjZX1QVV9chxx8TcHvyDm+6WqCTJPU/Ymvs8YHO+ffn3KyuH3mN71hwwOckxLLVTn3JbprYnH3rH4d9r27ZlRT78zsPyoFM25R7Hbh1jdNA/PjMsB9OtxnLMR1U9rqo+WFXXVFWrqqfNut7mOF6xm3e+ahf3Xzrfn1/vkpWq+vkkZyd5dZKHJ/lykvOr6sixBsa8tJbcdtN+WX/Y9nGHAsvG/U6+K1dfsSab7lh5t/bLLjowSXLCg+8aR1jQWz4zsGQOyuB37hfNcf2YWcevJGlJ/m4P77141nOnzjewPk6wf1mSt7TW3pokVfWCJD+ZwQ/l98cZGHvv4+89NDdduzrP/u/XjTsUWDYOO3J7brl+1U7t
O9oOP0ryDzP5zMDSaK2dl+S8JKnauSrTWrvbL3RV9dQkn2itXbGHV2+f/ex89SpZqarVSR6R5HU72lpr01X10SSP2cX9a5KsmdFk6Y8e+Pbla3LObx2X//CIO/NjP3fLuMOBZWP1/tPZtnXn/0ls3TJoW7P/9KhDgl7zmWE5mB7DhPfp7/e3blbysaW1tqhVjqrqqAwKCc/Zi9vvX1XXJNmc5LNJzmitfXs+/fVtGNgRSVYmuX5W+/VJjt7F/Wck2TDjuLrT6NijW27YL2c++4QctG4qv/2WK7Ny5Z6fAQa2bl6x0/yvJFm9ZtC2ZXPf/pMN4+UzA3t0de7+u/IZS/DO5yTZmOS9e7jvgiTPTfKkJC9McnyST1fVvIoLvaqsLMDrMpjfssO6SFjG5s7bV+SVv3RC7rh9Zf7P+y7P4Ucrv8N83HLDfjn86G07tR921KDt5uuX+3+yYWn5zLAcTLcVmR7xJo0z+jsug8Rih6XYO+JXkvxVa23z7m4aDi3b4StVdUGSbyX5uSR/vred9e2vHG5KMpXkqFntRyXZabxba21La+32HUfu/g+DEdq6uXLmc07I1VesyVnnXpH7PMA+KjBf37z4gBx3wpYcuHbqbu0PfNimJMkVFx8wjrCgt3xmYI82zvxdeQmGgP1wkpOS/Nl8n22t3Zbk60nuN5/nepWstNa2JrkwyWk72qpqxfD8s+OKi92bmkp+7wX3zSUXHpRXvvmqPOiUTeMOCZalT//D+qzcL3nys27+Xtuq1dN54s/fkksuPDA3XrN6jNFB//jMwMj9apILW2tfnu+DVbU2yYlJrp3Pc32sj56d5Nyq+mKSzyd5SQbLqb11nEExtze/+p753EfW59Gnb8jG2/bLx/7u0LtdP+0ZtyZJrr96VT72nsOSJJd/ZbCs5F+/cVBEO/K4rfmxn711hFFD/1z2bwflUx9Yn+edcW3WH7E911y5Jqf/3C056l5bc/bL7zXu8KB3fGZYDqZSmcr89j1Zij7nY5hIzKx4HF9VD01yy44J8cNNLp+Z5OVzvONjSd7XWjtneP6GJB/MYOjXsRlsSzKV5J3zia13yUpr7W+r6h5JzspgUv1FSZ7UWps96Z6e2FFm/9w/rc/n/mn9Ttd3JCvXfXtNzn39MXe7tuP8Bx5zh2QFkrz+xffOc757XU57xq1Zt34qV16yf8589vH52gVrxx0a9JLPDCyJU5J8Ysb5jjnh52YwST5JfiFJZe5k48QMFsva4bjhvYcnuTHJZ5I8urV243wCq9YmZ0fxYca34davn5CD1/VqhBv01o8f+9BxhwDABNvetuWT+fskWT+cY7xP2PF76asv+LHsv3a09YHNd2zP7zzqo8kE/Mz9Rg8AAPRS74aBAQDApJjK/OeQLEWfk0JlBQAA6CXJCgAA0EuGgQEAQEfGvIP9sjc53wkAADBRVFYAAKAjU21FpkZc6Rh1f12anO8EAACYKJIVAACglwwDAwCAjrRUpke8z0obcX9dUlkBAAB6SWUFAAA6YoL94kzOdwIAAEwUlRUAAOjIdKtMt9HOIRl1f11SWQEAAHpJsgIAAPSSYWAAANCRqazI1IjrA6Pur0uT850AAAATRWUFAAA6YoL94qisAAAAvSRZAQAAeskwMAAA6Mh0VmR6xPWBUffXpcn5TgAAgImisgIAAB2ZapWpEU94H3V/XVJZAQAAekllBQAAOmLp4sVRWQEAAHpJsgIAAPSSYWAAANCR1lZkuo22PtBG3F+XJuc7AQAAJorKCgAAdGQqlamMeOniEffXJZUVAACglyQrAABALxkGBgAAHZluo9/3ZLqNtLtOqawAAAC9pLICAAAdmR7D0sWj7q9Lk/OdAAAAE0WyAgAA9JJhYAAA0JHpVKZHvO/JqPvrksoKAADQSyorAADQkalWmRrx0sWj7q9LKisAAEAvqawAAEBHLF28OJPznQAAABNFsgIAAPSSYWAAANCR6VSmRzzh3dLFAAAAHVNZ
AQCAjrQxbArZVFYAAAC6JVkBAAB6yTAwAADoyHQbwwR7O9gDAAB0S2UFAAA6Ygf7xZmc7wQAAJgoKisAANARc1YWR2UFAADoJckKAADQS4aBAQBAR6bHsIP9qPvrksoKAADQSyorAADQERPsF0dlBQAA6CXJCgAA0EuSFQAA6MiOYWCjPuajqh5XVR+sqmuqqlXV02Zdf9uwfebx4b1474uq6qqq2lxVF1TVI+f305OsAADAvu6gJF9O8qLd3PPhJMfMOP7T7l5YVT+f5Owkr07y8OH7z6+qI+cTmAn2AADQkeUwwb61dl6S85Kkas5nt7TWrpvHa1+W5C2ttbcO3/uCJD+Z5FeS/P7evkRlBQAA2JPHV9UNVXVZVf1xVR0+141VtTrJI5J8dEdba216eP6Y+XSqsgIAAB0Zc2Vl3axKyZbW2pYFvPLDSd6b5MokJyZ5bZLzquoxrbWpXdx/RJKVSa6f1X59kgfOp2PJCgAATKarZ52/Osmr5vuS1trfzDj9alV9Jck3kzw+yccWGtzekKwAAMBkOi7JxhnnC6mq7KS1dkVV3ZTkftl1snJTkqkkR81qPyrJfOa9mLMCAABdaUmmUyM92ve739hau33GsSTJSlUdl+TwJNfu8ntubWuSC5OcNuOZFcPzz86nL5UVAADYh1XV2gyqJDscX1UPTXLL8PidJH+XQVXkxCSvT/KNJOfPeMfHkryvtXbOsOnsJOdW1ReTfD7JSzJYIvmt84lNsgIAAB1ZDksXJzklySdmnJ89/Hpukhcm+YEkz0lySJJrknwkyW/PqtScmMHE+iRJa+1vq+oeSc5KcnSSi5I8qbU2e9L9bklWAABgH9Za+2SS3WU4P74X77jvLtrOSXLOznfvPXNWAACAXlJZAQCAjiyTYWC9pbICAAD0ksoKAAB0RGVlcVRWAACAXlJZAQCAjqisLI7KCgAA0EuSFQAAoJcMAwMAgI60VmkjHpY16v66pLICAAD0ksoKAAB0ZDqV6Yx4gv2I++uSygoAANBLkhUAAKCXDAMDAICO2GdlcVRWAACAXlJZAQCAjli6eHFUVgAAgF5SWQEAgI6Ys7I4KisAAEAvSVYAAIBeMgwMAAA6YoL94qisAAAAvaSyAgAAHWljmGA/SZWViUxWnv6Ah2S/WjXuMGBZ+KVLrx53CLDs/O1P/NC4Q4DlY3pLctW4g2C5MgwMAADopYmsrAAAQB+0JK2Nvs9JobICAAD0ksoKAAB0ZDqVyoh3sB9xf11SWQEAAHpJZQUAADpiU8jFUVkBAAB6SbICAAD0kmFgAADQkelWqREPy5o2DAwAAKBbKisAANCR1sawKeQE7QqpsgIAAPSSZAUAAOglw8AAAKAj9llZHJUVAACgl1RWAACgIyori6OyAgAA9JJkBQAA6CXDwAAAoCN2sF8clRUAAKCXVFYAAKAjdrBfHJUVAACgl1RWAACgI4PKyqiXLh5pd51SWQEAAHpJsgIAAPSSYWAAANARO9gvjsoKAADQSyorAADQkTY8Rt3npFBZAQAAekmyAgAA9JJhYAAA0BET7BdHZQUAAOgllRUAAOiKGfaLorICAAD0ksoKAAB0ZQxzVmLOCgAAQLckKwAAQC8ZBgYAAB1pbXCMus9JobICAAD0kmQFAAA6smNTyFEf81FVj6uqD1bVNVXVquppM66tqqr/VVVfrao7h/e8vaqO3cM7XzV818zj0vn+/CQrAACwbzsoyZeTvGgX1w5M8vAkrxl+/ZkkJyX5wF689+Ikx8w4Tp1vYOasAADAPqy1dl6S85KkqmZf25Dk9JltVfXrST5fVfdurX17N6/e3lq7bjGxSVYAAKArrUa/78n3+1s3K/nY0lrbsgQ9rE/Skty2h/vuX1XXJNmc5LNJzthDcrMTw8AAAGAyXZ1kw4zjjMW+sKr2T/K/kryztXb7bm69IMlzkzwpyQuTHJ/k01W1bj79qawAAEBHxrx08XFJNs64tKiqSlWtSvKu
JJVBArKbGNp5M06/UlUXJPlWkp9L8ud726dkBQAAJtPGPVQ/9tqMROU+SX50vu9trd1WVV9Pcr/5PGcYGAAAdKWN6VhCMxKV+yf5sdbazQt4x9okJya5dj7PSVYAAGAfVlVrq+qhVfXQYdPxw/N7DxOV9yQ5JckvJVlZVUcPj9Uz3vGx4SphO87fUFU/UlX3rarHJnlfkqkk75xPbIaBAQDAvu2UJJ+YcX728Ou5SV6V5KeH5xfNeu4JST45/POJSY6Yce24DBKTw5PcmOQzSR7dWrtxPoFJVgAAoCML2VF+Kfqc3/3tkxlMmp/LHl/YWrvvrPNfmFcQczAMDAAA6CWVFQAA6NKIly6eJCorAABAL0lWAACAXjIMDAAAOrIcJtj3mcoKAADQSyorAADQlQ52lN+rPieEygoAANBLKisAANCZyl7sqdhBn5NBZQUAAOglyQoAANBLhoEBAEBXTLBfFJUVAACgl/aqslJVj1vIy1trn1rIcwAAMBFUVhZlb4eBfTLz+7ZreP/K+QYEAACQ7H2y8oROowAAAJhlr5KV1to/dx0IAABMnFaDY9R9TohFT7CvqmOq6j9W1UFLERAAAECyiGSlqp5aVZcmuTrJl5I8ath+RFX9W1U9bWlCBACA5am18RyTYkHJSlX9VJL3JrkpyaszmFCfJGmt3ZTku0metxQBAgAA+6aFVlbOTPKp1tqpSd60i+ufTfKwBUcFAACToI3pmBALTVZOTvKu3Vy/PsmRC3w3AADAgpOVTUl2N6H+hCQ3L/DdAAAAC05WPpHkOVW109LHVXV0kv+S5COLCQwAAJa9HUsXj/qYEAtNVv6/JMcl+UKSX8tgZNyPV9XvJvlqBhPuX70kEQIAAPukvd3B/m5aa5dV1alJ/iDJazJITl4xvPzJJC9qrV21FAECAMByVW1wjLrPSbGgZCVJWmsXJ/mxqjo0yf0yqNJc0Vq7camCAwAA9l0LTlZ2aK3dmsFwMAAAgCWzmB3s71FVb6iqf6+qTcPj34dtRy1lkAAAsCzZZ2VRFrqD/YMzmEj/siQbkrx7eGwYtn2lqk5eqiABAIB9z0KHgb0pycokj2qt3W0IWFU9MsmHkvxRkicsLjwAAFjGxrGUsKWL88gkfzA7UUmS1trnM1gl7FGLCQwAANi3LbSyckOSzbu5vnl4DwAA7LvGMYdkX5+zkuSNSV443K3+bqrq2CQvHN4DAACwIHtVWamql+2i+Y4k36iq9yX5xrDt/kmeNjyfnMFyAADAyO3tMLA37ObaL+2i7QeGz/zfeUcEAACTwjCwRdnbZOX4TqMAAACYZa+Sldbat7oOBAAAJo7KyqIseAd7AACALi106eJU1Q8k+Y0kD0+yPjsnPq21duIiYgMAAPZhC6qsVNXjk3w+yVOSXJPkhCRXDP98nwxWCvvUkkQIAADL1Y4d7Ed9TIiFDgM7K4Pk5KQkzxu2vba1dmqSxyY5Lsm7Fh8eAACwr1posvLwJH/eWrs9ydSwbWWStNYuSPKnSV6z+PAAAGD5qjaeY1IsNFnZnmTj8M+3JdmW5MgZ169I8qCFhwUAAOzrFjrB/hsZ7Faf1lqrqkuTPD3JXw2v/2SS6xYfHsvdqtXTefYrrstpz7g1a9dP5cpLDsi5rz86X/rUunGHBmO17c7KJX++Ljd9ZXVu/urqbN2wIo9+7S058Wc27XTvt847IJe8bW1uv2JVakXLIfffngf954255+M3jyFy6I/7P/DWnPbk7+QHHn5zjjp6U27fsDqXXXxo3v6WB+aa76wdd3jAElhoZeVDSf5TVe1Ids5O8jNVdXlVXZ7kpzMYCsY+7uVv/E5+5vk35uPvOzR/fOY9Mz2dvOYvr8iDH3nHuEODsdpy64p89f8dnA1X7JdDT9o6532X/eVB+cxLD8+aQ6bz0JdvyEP+68Zsu6PyyRcckW9/ZP8RRgz987PP+kZ+6EeuzZe/eET+9A9Ozoc/cJ+c/NCb84d/8c+5z/G3
jzs8GGhjOibEQisrr0nyBxnOV2mtnVtVU0meMWz7vdba25YkQpatkx66KU942m15y1nH5D1/Mhgl+NH3HJo3f/yy/OdXXpuX/vT9xxwhjM8BR07lZz59TQ64x3Ru/uqqfPiZu048LnvH2hz+kK15/J/cnBou7nLiM+7Mex93TK58/0G59xNVV9h3vf9vTsz/ftUjsn379//u9dMfOzZvevsn88xfvjxvOOsRY4wOWAoLqqy01ra11m5urbUZbe9orT29tfazC01UqupxVfXBqrqmqlpVPW0h76EfTn3KbZnannzoHYd/r23blhX58DsPy4NO2ZR7HDv33ybDpFu5OjngHtN7vG/bnSuy5rCp7yUqSbJqbct+B7Ws3H+C/uoMFuCSrx12t0QlSa65em2+feW63Os+KvgwCfq2g/1BSb6c5EXjDoTFu9/Jd+XqK9Zk0x0r79Z+2UUHJklOePBd4wgLlpWjHrkl135m/1z2lwfljqtXZsMV++XzZx2SbRsrJ/2yX8ZgZy2HHLYlGzasHncgwBLYq2FgVfXxBby7tdZOm+cD5yU5b9jnArqkTw47cntuuX7VTu072g4/avuoQ4Jl55T/77ZsuXVFvvh7h+aLvzdoW3PoVE576025x8NUJ2G2Jzzx6hxx5Oa8488eOO5QIElSGf1SwpP0W/TezllZkflP1ZmknxMLsHr/6WzbuvO/Blu3DNrW7L/nITCwr1u5f8u647fnwKPuzD0fvznb7qxceu7afOq/HZ4nvuOGrLvP1J5fAvuI4+69MS98+VdzyVcPzcfOu9e4wwGWwF4lK621x3ccx4JU1Zoka2Y0WQ+3R7ZuXpFVq3fOcVevGbRt2dy3UYjQP59+yeFZsbLl8X9y8/fajjvtrnzgx4/ORW9cnx/+v7eMMTroj0MP25xXveGC3HnHqrz2lT+Y6Wl/ZwqTYLn/tnhGkg0zjqvHGw4z3XLDfjnsqG07te9ou/n6hS5GB/uGjd9ZmWs/vX+O+9G7r/i15pCWIx+xNTd+yZh8SJIDD9qWV/+fz+Wgtdty5ssfnVtusqw3PdJqPMeEWO7JyuuSrJ9xHDfecJjpmxcfkONO2JID1959mMoDHzbY9O6Kiw8YR1iwbGy+abA4xfQuRkxOb0/a1OT8zwgWatXqqfzO6y/IPe91Z179ikflO1cZZAGTZFknK621La2123ccSTaOOya+79P/sD4r90ue/KzvD19ZtXo6T/z5W3LJhQfmxmv8rTDszrr7bE+taPn2hw5ImzGictN1K3PDF9fk0P+wc+US9iUrVrT85lkX5oEn35rX/fYpufTiw8YdEuzMppCL0qtxOFW1Nsn9ZjQdX1UPTXJLa+3b44mKhbrs3w7Kpz6wPs8749qsP2J7rrlyTU7/uVty1L225uyXm/gIl73joGzduCJ33TCooHz3E/tn0/WDP5/0rDuy/2HTOeEZd+ab716bjz33iNzr9Luy7c4VufydB2VqS+XBz7dDN/u2X/2Ni/PoH74uF3zmqKxbtzVPeOJ37nb9Ex/x/xpY7nqVrCQ5JcknZpyfPfx6bpLnjjwaFu31L753nvPd63LaM27NuvVTufKS/XPms4/P1y5YO+7QYOwu+Yt1ufOa7/9n+Dv/dGC+80+DPx//U5uyet1UHvk7t+XQk7blm393UC46e32S5PCHbM1jfv/WHPWDli5m33bC/TYkSR516vV51KnX73RdsgLLX6+SldbaJ2PJ44mybcuK/Nlrjs2fvebYcYcCvfO0j1+3x3tW7Jec9Kw7c9Kz7hxBRLC8nPEbPzTuEGDPxjEsyzCwgaq6Z5LHJTkyyd+11q6uqpUZTHbf0FqzAQAAALAgC0pWarC9/P9J8uvDd7QkX81g6eC1Sa5KcmaSNy5FkAAAsBxVG8MO9hNUWVnoamCvSPLiJG9IcnpmDN1qrW1I8t4kz1h0dAAAwD5rocPA/kuSt7fWfquqDt/F9a8k+YmFhwUAABPAnJVFWWhl5V5J
/nU31+9McvAC3w0AAIxIVT2uqj5YVddUVauqp826XlV1VlVdW1V3VdVHq+r+e/HeF1XVVVW1uaouqKpHzje2hSYrN2SQsMzlEUnsiwIAAP13UJIvJ3nRHNf/R5L/luQFSR6VQWHi/Kraf64XVtXPZ7ANyauTPHz4/vOr6sj5BLbQZOW9SV5QVSfMaGvDwJ6YwZ4o717guwEAYDIsgx3sW2vntdZe2Vp73+xrw4W1XpLkd1trf99a+0qSZyc5NsnTdvPalyV5S2vtra21f88g0dmU5FfmE9tCk5XfSXJtkouSvD2DH8n/rKrPJDkvgzkrr13guwEAgMVbV1UHzzjWLOAdxyc5OslHdzQMF9S6IMljdvVAVa3OYKTVzGemh+e7fGYuC0pWhgE+Osnrk9wzyeYkP5LkkAxKPT/cWtu0kHcDAMCk2LF08aiPoauTbJhxnLGAb+Ho4dfrZ7VfP+PabEckWTnPZ3ZpwZtCttbuSvK7wwMAAOiX45JsnHG+ZVyBLNRCh4EBAAD9trG1dvuMYyHJynXDr0fNaj9qxrXZbkoyNc9ndmmhO9j/xV7c1lprv7qQ9wMAwERoNThG3efSuTKDBOO0DOarp6oOzmBVsD/eZfetba2qC4fPvH/4zIrh+Tnz6Xyhw8B+NDuvM7AyyTHDrzdmsKQZAADQY1W1Nsn9ZjQdX1UPTXJLa+3bVfXGJK+sqsszSF5ek+SaDBOR4Ts+luR9rbUdycjZSc6tqi8m+XwGK4odlOSt84ltQclKa+2+u2qvqlVJfm0YzOkLeTcAAEyM5bGD/SlJPjHj/Ozh13Mz2JLk9RkkGm/OYEGtzyR5Umtt84xnTsxgYv0ghNb+tqrukeSsDCbVXzR8Zvak+91a8AT7XWmtbUtyTlU9KIMSz08u5fsBAICl1Vr7ZJI5x4611lqSM4fHXPfcdxdt52Sew75mW9JkZYYvJ/nljt4NAADLwqylhEfW56ToajWw0zPYoRIAAGBBFroa2FwloEOSPC7Jw5P8/gJjAgAAWPAwsFfN0X5rkm8meUGStyzw3QAAMBmWxwT73lroamA2kwQAADo176Sjqg6oqrOr6qe6CAgAACZG+/4k+1Edk1RZmXey0lq7K4O9VI5a+nAAAAAGFjqc68IkJy9lIAAAADMtdIL9S5J8qKq+luRtrbXtSxcSAABMCBPsF2Wvk5WqelySS1prNyY5N8l0kj9N8odV9d0kd816pLXW/uOSRQoAAOxT5lNZ+USSZyV5Z5Kbk9yU5LIuggIAgImgsrIo80lWaniktfb4TqIBAAAYWuicFQAAYA++t5zwiPucFPNdDWyCvnUAAKDP5pusvKOqpvbysEIYAACwYPMdBvbRJF/vIhAAAICZ5pusnNta++tOIgEAAJjBBHsAAOiKpYsXZb5zVgAAAEZCsgIAAPTSXg8Da61JbAAAYB7ss7I4EhAAAKCXTLAHAIAuTVClY9RUVgAAgF5SWQEAgK5YunhRVFYAAIBekqwAAAC9ZBgYAAB0xNLFi6OyAgAA9JLKCgAAdMUE+0VRWQEAAHpJsgIAAPSSYWAAANARE+wXR2UFAADoJZUVAADoign2i6KyAgAA9JLKCgAAdEVlZVFUVgAAgF6SrAAAAL1kGBgAAHTE0sWLo7ICAAD0ksoKAAB0xQT7RVFZAQAAekmyAgAA9JJhYAAA0BXDwBZFZQUAAOgllRUAAOiIpYsXR2UFAADoJckKAADQS4aBAQBAV0ywXxSVFQAAoJdUVgAAoCMm2C+OygoAANBLKisAANAVc1YWRWUFAADoJckKAADQS4aBAQBAVwwDWxSVFQAAoJdUVgAAoCM1PEbd56RQWQEAAHpJsgIAAPSSYWAAANAVE+wXRWUFAAD2YVV1VVW1XRxvmuP+5+7i3s1dxKayAgAAHak2OEbd5zz9YJKVM85PTvJPSd69m2duT3LSjPNOvkvJCgAA7MNaazfOPK+q30zyzST/vPvH2nWdBhbDwAAAoDtt
TMcCVdXqJM9K8hettd29aW1VfauqvlNVf19VD154r3OTrAAAwGRaV1UHzzjW7MUzT0tySJK37eaey5L8SpKnZpDYrEjyr1V13OLC3ZlkBQAAJtPVSTbMOM7Yi2d+Ncl5rbVr5rqhtfbZ1trbW2sXtdb+OcnPJLkxya8tQcx3Y84KAAB0aXxLCR+XZOOM8y27u7mq7pPkxzJIPvZaa21bVf1bkvvNO8I9UFkBAIDJtLG1dvuMY7fJSpLnJbkhyT/Op5OqWpnkIUmuXWCcc1JZAQCAjiyTpYtTVSsySFbOba1tn3Xt7Um+21o7Y3h+ZpLPJflGBvNbXpHkPkn+bDFx74pkBQAA+LEk907yF7u4du8k0zPOD03yliRHJ7k1yYVJHtta+/elDkqyAgAA+7jW2keS1BzXHj/r/KVJXjqCsCQrAADQmUXue7LgPieECfYAAEAvqawAAEBHlssE+75SWQEAAHpJZQUAALpizsqiqKwAAAC9JFkBAAB6yTAwAADoiAn2iyNZgX3cXz3wuHGHAMvOt9998LhDgGVjatPm5NnjjoLlSrICAABdMcF+UcxZAQAAekmyAgAA9JJhYAAA0BXDwBZFZQUAAOgllRUAAOiIpYsXR2UFAADoJZUVAADoijkri6KyAgAA9JJkBQAA6CXDwAAAoCPVWqqNdlzWqPvrksoKAADQSyorAADQFRPsF0VlBQAA6CXJCgAA0EuGgQEAQEfsYL84KisAAEAvqawAAEBXTLBfFJUVAACgl1RWAACgI+asLI7KCgAA0EuSFQAAoJcMAwMAgK6YYL8oKisAAEAvqawAAEBHTLBfHJUVAACglyQrAABALxkGBgAAXTHBflFUVgAAgF5SWQEAgA5N0oT3UVNZAQAAekllBQAAutLa4Bh1nxNCZQUAAOglyQoAANBLhoEBAEBH7GC/OCorAABAL6msAABAV2wKuSgqKwAAQC9JVgAAgF4yDAwAADpS04Nj1H1OCpUVAACgl1RWAACgKybYL4rKCgAA0EuSFQAAoJcMAwMAgI7YwX5xVFYAAIBeUlkBAICutDY4Rt3nhFBZAQAAekllBQAAOmLOyuKorAAAAL0kWQEAAHrJMDAAAOiKHewXRWUFAADoJZUVAADoiAn2i6OyAgAA+7CqelVVtVnHpXt45plVdWlVba6qr1bVk7uITbICAABcnOSYGcepc91YVY9N8s4kf57kYUnen+T9VXXyUgdlGBgAAHRl+exgv721dt1e3vviJB9urf3v4flvV9XpSX49yQsW0vlcVFYAAGAyrauqg2cca3Zz7/2r6pqquqKq/qqq7r2bex+T5KOz2s4fti8pyQoAAHRkxwT7UR9DVyfZMOM4Y44wL0jy3CRPSvLCJMcn+XRVrZvj/qOTXD+r7fph+5IyDAwAACbTcUk2zjjfsqubWmvnzTj9SlVdkORbSX4ug3kpYyNZAQCArox3U8iNrbXb5/14a7dV1deT3G+OW65LctSstqOG7UvKMDAAAOB7qmptkhOTXDvHLZ9NctqsttOH7UtKsgIAAPuwqnpDVf1IVd13uCzx+5JMZbA8carq7VX1uhmP/EGSJ1XVy6vqgVX1qiSnJDlnqWMzDAwAADqyTHawPy6DxOTwJDcm+UySR7fWbhxev3eS6R03t9b+tap+McnvJnltksuTPK219rXFRb4zyQoAAOzDWmu/sIfrj99F27uTvLurmHaQrAAAQFem2+AYdZ8TwpwVAACglyQrAABALxkGBgAAXRnvPivLnsoKAADQSyorAADQkcoYli4ebXedUlkBAAB6SWUFAAC60trgGHWfE0JlBQAA6CXJCgAA0EuGgQEAQEeqjWGC/eSMAlNZAQAA+kllBQAAumJTyEVRWQEAAHpJsgIAAPSSYWAAANCRai014n1PRt1fl1RWAACAXlJZAQCArkwPj1H3OSFUVgAAgF5SWQEAgI6Ys7I4KisAAEAvSVYAAIBeMgwMAAC6Ygf7RVFZAQAA
ekllBQAAutLa4Bh1nxNCZQUAAOglyQoAANBLhoEBAEBHqg2OUfc5KVRWAACAXlJZoVOrVk/n2a+4Lqc949asXT+VKy85IOe+/uh86VPrxh0a9JLPDMyt7prKwR+4Kasv35TV37grK++cys3/9bjc+YRD73bfQR+9JQd96rasumZLVtw5lalD98vmB6/NhmcemakjV48pevZZJtgvisoKnXr5G7+Tn3n+jfn4+w7NH595z0xPJ6/5yyvy4EfeMe7QoJd8ZmBuKzZOZf17bsiq727JtvvuP+d9q6+8K9uPXJXbn3pEbvkvx+bOxx2SA/5tY47+zW9k5S3bRhgxsFgqK3TmpIduyhOedlvectYxec+fHJkk+eh7Ds2bP35Z/vMrr81Lf/r+Y44Q+sVnBnZv6tD9cvWbH5jpQ1dl9Tc35ejf/OYu77v1v9xzp7ZNP7g+x/zmN3LQP9+a259+ZNehwvfU9OAYdZ+TQmWFzpz6lNsytT350DsO/17bti0r8uF3HpYHnbIp9zh26xijg/7xmYE9WLUi04euWtCjU0cOnqs7J+i3ONgH9CpZqaozquoLVbWxqm6oqvdX1UnjjouFud/Jd+XqK9Zk0x0r79Z+2UUHJklOePBd4wgLestnBpbWio3bs2LD9qz+5qYc9qarkyRbHnLQmKMC5qNvw8B+JMmbknwhg9hem+QjVfWg1tqdY42MeTvsyO255fqd/wZsR9vhR20fdUjQaz4zsLTu+WuXprYNJhpPrVuZW37lmGz+jxarYMRMsF+UXiUrrbUnzTyvqucmuSHJI5J8ahwxsXCr95/Otq21U/vWLYO2NfsrxcNMPjOwtG74rfumtrWsunpzDvr0bVmx2WcIlpteJSu7sH749ZZdXayqNUnWzGjy1yU9snXziqxavXNmv3rNoG3L5l6NQoSx85mBpbXl5LVJks0PW5e7fvDgHP2yyzO9/4rc8RNHjDky9ilteIy6zwnR2//zVdWKJG9M8i+tta/NcdsZSTbMOK4eTXTsjVtu2C+HHbXzEpE72m6+vu+5MoyWzwx0Z/vRa7Lt+ANy0KdvG3cowDz0NlnJYO7KyUl+YTf3vC6D6suO47gRxMVe+ubFB+S4E7bkwLVTd2t/4MM2JUmuuPiAcYQFveUzA92qrdNZsclQMFhOepmsVNU5SZ6S5AmttTmrJa21La2123ccSTaOLEj26NP/sD4r90ue/Kybv9e2avV0nvjzt+SSCw/MjdfYRRhm8pmBJTDVUndM7dS8+vJNWfXtzdl6oqSf0arWxnJMil6NKaiqSvJHSZ6e5PGttSvHHBKLcNm/HZRPfWB9nnfGtVl/xPZcc+WanP5zt+Soe23N2S+/17jDg97xmYE9W3veTVmxafp7O9EfcOHt3/vzxicN9ii65wsuzabHrs+2e61J239FVn1rcw765K2ZPnBlNvysDSFhOelVspLB0K9fTPLUJBur6uhh+4bWmg0GlqHXv/jeec53r8tpz7g169ZP5cpL9s+Zzz4+X7tg7bhDg17ymYHdO/iDN2W/G78/t+vAC27PgRfcniS584cPydSh++WO0w7N/hffmQM/tyG1tWXqsP2y6YcOyYZnHJmpI1UoGTFLFy9KtR59M1U1VzDPa629bS+ePzjJhsfnqdmvFrbDLQDsybff/ZBxhwDLxtSmzfnms1+XJOuHw/b3CTt+L33CI87IfvvtP9K+t2/fnE9cOBk/815VVlprO28wAAAAy1VLMup1HfpTi1i0Xk6wBwAAkKwAAAC91KthYAAAMEnGsZTwJC1drLICAAD0ksoKAAB0pWUMSxePtrsuqawAAAC9JFkBAAB6yTAwAADoih3sF0VlBQAA6CWVFQAA6Mp0khpDnxNCZQUAAOglyQoAANBLhoEBAEBH7GC/OCorAABAL6msAABAVyxdvCgqKwAAQC+prAAAQFdUVhZFZQUAAOglyQoAAOzDquqMqvpCVW2sqhuq6v1VddIennluVbVZx+al
js0wMAAA6MryGAb2I0nelOQLGeQHr03ykap6UGvtzt08d3uSmUnNkn+jkhUAANiHtdaeNPO8qp6b5IYkj0jyqd0/2q7rMDTJCgAAdGY6SY2hz4F1VXfrfEtrbctevGH98Oste7hvbVV9K4OpJV9K8luttYvnEekembMCAACT6eokG2YcZ+zpgapakeSNSf6ltfa13dx6WZJfSfLUJM/KIK/416o6bpEx343KCgAATKbjkmyccb43VZU3JTk5yam7u6m19tkkn91xXlX/muSSJL+W5LfnHekcJCsAANCRai014gn2M/rb2Fq7fa+fqzonyVOSPK61dvV8+mytbauqf0tyv/k8tyeGgQEAwD6sBs5J8vQkP9pau3IB71iZ5CFJrl3K2FRWAACgK8tj6eI3JfnFDOafbKyqo4ftG1prdyVJVb09yXdba2cMz89M8rkk30hySJJXJLlPkj9bbPgzSVYAAGDf9sLh10/Oan9ekrcN/3zvzFxnLDk0yVuSHJ3k1iQXJnlsa+3flzIwyQoAAHRluiU14srK9Pz6a63tcXHl1trjZ52/NMlL59XRApizAgAA9JJkBQAA6CXDwAAAoCvLY4J9b6msAAAAvaSyAgAAnRlDZSUqKwAAAJ2SrAAAAL1kGBgAAHTFBPtFUVkBAAB6SWUFAAC6Mt0y8gnv89zBvs9UVgAAgF5SWQEAgK606cEx6j4nhMoKAADQS5IVAACglwwDAwCArli6eFFUVgAAgF5SWQEAgK5YunhRVFYAAIBekqwAAAC9ZBgYAAB0xQT7RVFZAQAAekllBQAAutIyhsrKaLvrksoKAADQSyorAADQFXNWFkVlBQAA6CXJCgAA0EuGgQEAQFemp5NMj6HPyaCyAgAA9JLKCgAAdMUE+0VRWQEAAHpJsgIAAPSSYWAAANAVw8AWRWUFAADoJZUVAADoynRLMuJKx7TKCgAAQKdUVgAAoCOtTae10W7SOOr+uqSyAgAA9JJkBQAA6CXDwAAAoCutjX7Cu6WLAQAAuqWyAgAAXWljWLpYZQUAAKBbkhUAAKCXDAMDAICuTE8nNeJ9T+yzAgAA0C2VFQAA6IoJ9ouisgIAAPSSygoAAHSkTU+njXjOSjNnBQAAoFuSFQAAoJcMAwMAgK6YYL8oKisAAEAvqawAAEBXpltSKisLpbICAAD0kmQFAADoJcPAAACgK60lGfG+J4aBAQAAdEtlBQAAOtKmW9qIJ9g3lRUAAIBuSVYAAIBeMgwMAAC60qYz+gn2I+6vQyorAABAL0lWAACgI226jeVYiKp6UVVdVVWbq+qCqnrkHu5/ZlVdOrz/q1X15AV1vBuSFQAA2MdV1c8nOTvJq5M8PMmXk5xfVUfOcf9jk7wzyZ8neViS9yd5f1WdvJRxSVYAAKArbXo8x/y9LMlbWmtvba39e5IXJNmU5FfmuP/FST7cWvvfrbVLWmu/neRLSX59IZ3PRbICAAD7sKpaneQRST66o621Nj08f8wcjz1m5v1D5+/m/gWZyNXAtmdbMjl74QDQM1ObNo87BFg2pu/aMu4Qxmocv5duz7Ydf1xXVTMvbWmt7eofyBFJVia5flb79UkeOEc3R89x/9HzCnYPJi1ZWZckn8mHxh0HAJPs2X8/7ghgOVqX5PZxBzFCW5Nc95l8aEl/eZ+HO5JcPavt1UleNfpQFm7SkpVrkhyXZOO4A2En6zL4wPjnA3vHZwbmx2em39Zl8HvaPqO1trmqjk+yetyxzDBXmeumJFNJjprVflSS6+Z45rp53r8gE5WstNZaku+OOw52NqMEubG1ti/9rQosiM8MzI/PTO/tk/9MWmubk/R+3GhrbWtVXZjktAxW9UpVrRienzPHY58dXn/jjLbTh+1LZqKSFQAAYEHOTnJuVX0xyeeTvCTJQUnemiRV9fYk322tnTG8/w+S/HNVvTzJPyb5hSSnJHn+UgYlWQEAgH1ca+1vq+oeSc7KYJL8RUme1FrbMYn+3kmm
Z9z/r1X1i0l+N8lrk1ye5Gmtta8tZVw1GDkF3aqqNUnOSPK6OVahAGbwmYH58ZmBySRZAQAAesmmkAAAQC9JVgAAgF6SrAAAAL0kWQEAAHpJskKnqmpNVf2vqrqmqu6qqguq6vRxxwV9VVVrq+rVVfXhqrqlqlpVPXfccUEfVdUPVtU5VXVxVd1ZVd+uqndV1QPGHRuwNCQrdO1tSV6W5K+SvDjJVJIPVdWp4wwKeuyIJGcm+Q9JvjzmWKDv/meSZyT5WAb/j3lzkscl+VJVnTzOwIClYeliOlNVj0xyQZJXtNbeMGzbP8nXktzQWnvsOOODPhruFXFoa+26qjolyReSPK+19rbxRgb9U1WPTfLF1trWGW33T/LVJO9prT1rbMEBS0JlhS79bAaVlDfvaGitbU7y50keU1X3Gldg0FettS2ttevGHQcsB621f52ZqAzbLk9ycQbVSWCZk6zQpYcl+Xpr7fZZ7Z8ffn3oaMMBYNJVVSU5KslN444FWDzJCl06Jsm1u2jf0XbsCGMBYN/wS0numeRvxx0IsHiSFbp0QJItu2jfPOM6ACyJqnpgkjcl+WySc8ccDrAEJCt06a4ka3bRvv+M6wCwaFV1dJJ/TLIhyc+21qbGHBKwBPYbdwBMtGszKMXPdszw6zUjjAWACVVV65Ocl+SQJD/cWvP/F5gQKit06aIkD6iqg2e1P2rGdQBYsOGS+B9M8oAkT2mt/fuYQwKWkGSFLr0nycokz9/RMNxD4nlJLmitfWdcgQGw/FXVygwm0j8myTNba58dc0jAEjMMjM601i6oqncneV1VHZnkG0mek+S+SX51nLFBn1XVr2cwnGXHink/VVXHDf/8R621DWMJDPrn/yT56QwqK4dV1d02gWytvWMsUQFLxg72dGpYnn9NkmclOTTJV5L8dmvt/LEGBj1WVVcluc8cl49vrV01umigv6rqk0l+ZK7rrbUaXTRAFyQrAABAL5mzAgAA9JJkBQAA6CXJCgAA0EuSFQAAoJckKwAAQC9JVgAAgF6SrAAAAL0kWQEAAHpJsgKwl6rqqqp624zzx1dVq6rHjy2oWWbHuJv7WlW9agHvf+7w2VMWEt8c73xVVdmhGICdSFaAZWHGL8k7js1V9fWqOqeqjhp3fPNRVU9eSKIAAPua/cYdAMA8nZnkyiT7Jzk1yQuTPLmqTm6tbRpxLJ9KckCSrfN87slJXpTkVUsdEABMEskKsNyc11r74vDPf1ZVNyd5WZKnJnnnrh6oqoNaa3cudSCttekkm5f6vQDAgGFgwHL38eHX45Okqt5WVXdU1YlV9aGq2pjkr4bXVlTVS6rq4uEwsuur6k+r6tCZL6yBV1bV1VW1qao+UVUPnt3xXHNWqupRw75vrao7q+orVfXiHfFlUFXZMW+kzZyvsdQx7q2quk9V/b+quqyq7qqqm6vq3VV13zkeOXAY181VdXtVvX12jMP3/kRVfXr4c9hYVf+4mDgB2LeorADL3YnDrzfPaNsvyflJPpPkvyfZMTzsT5M8N8lbk/xhBgnOryd5WFX9UGtt2/C+s5K8MsmHhsfDk3wkyeo9BVNVpyf5hyTXJvmDJNcl+Q9JnjI8/9MkxyY5Pckv7+IVncc4hx9M8tgkf5Pk6iT3zWCI3Ser6kG7GGJ3TpLbMhjKdtLw3vtU1eNba234s/jlJOdm8M/ifyY5cHjfZ6rqYa21qxYYKwD7CMkKsNysr6ojMpiz8kMZzGG5K4MEYYc1Sd7dWjtjR0NVnZrkPyf5pdbaX89o/0SSDyd5ZpK/rqp7JPkfSf4xyU/N+MX795L81u4Cq6qVGSQb1yZ5aGvtthnXKklaa5+tqq8nOb219o5Zz3ce4278Y2vtPbPi+WCSzyZ5RpK/nHX/1iSn7UiequpbSV6f5KeSfKCq1maQbP1Za+35M955bpLLhnE+PwCwG4aBAcvNR5PcmOQ7GVQB7kjy9Nbad2fd98ezzp+ZZEOSf6qqI3YcSS4c
vuMJw/t+LIPqxB/tSAKG3rgXsT0sg0rIG2cmKkky611zGUWMu9Rau2vHn6tqVVUdnuQbGVRPHr6LR948o8qTDH7e2zNYPCAZVI4OSfLOWd/LVJILZnwvADAnlRVguXlRkq9n8Ivx9UkuG050n2l7BkOZZrp/kvVJbpjjvUcOv95n+PXymRdbazdW1a17iG3HkLSv7eG+uYwixl2qqgOSnJHkeUnumaRmXF6/i0dm931HVV2bwfCxZPC9JN+fUzTb7QuJE4B9i2QFWG4+P2M1sLls2UUCsyKDJOCX5njmxkVHtnjjjPGPMkhU3pjB0K8NSVoG1auFVOF3PPPLGczbmW37At4JwD5GsgLsK76ZwfCpf5k55GkXvjX8ev8kV+xoHM4T2Wm1q130kSQnZzBcbS5zDQkbRYxz+dkk57bWXj7jfftnMJRrV+6f5BMz7l2b5JgMJvsn3/9Z3NBa293PAgDmZM4KsK94V5KVSX579oWq2q+qDhmefjTJtiS/sWNS/NBL9qKPL2WwYeVLZrxvRx8z33XnsO1u94woxrlM5e5Dv5LkN4bx7Mrzq2rVjPMXZvAXYOcNz8/PYKjXb826L8n3EisA2C2VFWCf0Fr756r60yRnVNVDM1jmd1sGFYJnJnlxkvcM5328IYP5G/9QVR/KYOL8TyS5aQ99TFfVC5N8MMlFVfXWDFYGe2CSByf58eGtFw6//mFVnZ9kqrX2N6OIcTf+IckvV9WGJP+e5DEZVHlunuP+1Uk+VlXvymDp4v+awVLRHxj+LG4f/iz+MsmXqupvMhjGdu8kP5nkXzJYkhkA5iRZAfYZrbUXVNWFSX4tyWszmDdxVZJ3ZPDL8w6vzGBn+hdksGrVBUmemMFSwXvq4/yqekKS30ny8gwq2N9M8pYZt703gzkiv5DkWRlUNP5mVDHO4cUZVFd+KYNlof8lg2Tl/Dnu//XhvWclWZXknUn+28zVyVprf11V1yT5zSSvyGBJ6e8m+XQG+8gAwG7V3q2mCQAAMFrmrAAAAL0kWQEAAHpJsgIAAPSSZAUAAOglyQoAANBLkhUAAKCXJCsAAEAvSVYAAIBekqwAAAC9JFkBAAB6SbICAAD0kmQFAADoJckKAADQS/8/OA47EBRXanAAAAAASUVORK5CYII=", "text/plain": [ "
" ] @@ -510,7 +519,7 @@ "outputs": [ { "data": { - "image/png": "iVBORw0KGgoAAAANSUhEUgAABSsAAALGCAYAAACtT8T3AAAAOXRFWHRTb2Z0d2FyZQBNYXRwbG90bGliIHZlcnNpb24zLjcuMSwgaHR0cHM6Ly9tYXRwbG90bGliLm9yZy/bCgiHAAAACXBIWXMAAA9hAAAPYQGoP6dpAABS/ElEQVR4nO3deZxdZX0/8M83CQlbCPsqm0hxQRGiKFYRXHHBVrFq0Z/i0p8LtqXuuBTUVq37bl1QrP4ErFBFcLfgUnFDURRRQMMiYSdhD0ue3x/3Dr0Mk2RyZ5I5k3m/X6/7utxznvOc770zh5n55DnPU621AAAAAABMtVlTXQAAAAAAQCKsBAAAAAA6QlgJAAAAAHSCsBIAAAAA6ARhJQAAAADQCcJKAAAAAKAThJUAAAAAQCcIKwEAAACAThBWAgAAAACdIKwEANYZVbVJVX2wqhZV1e1V1arqgVV1QP+/j57qGum2/vfJ6aO2Hd3ffsAU1XRY//yHjdq+qKoWTUVNAzVM6WcDAKx7hJUAwJTqBx1tkrp7Z5K/T3J2krcneXOSyyapbxhKVR3b/z7fZaprWV0rCkoBANaUOVNdAADAJHpykj+01g4e3FhV956ielg3fDjJ8UkumqLz/1eSHydZPEXnX5mp/mwAgHWMsBIAWJdsn+T7U10E65bW2lVJrprC8y9NsnSqzr8yU/3ZAADrHreBAwCdU1W79G89Pbb/38dX1VVVdUtV/byqnjyq/en9W8krySNHbi0fPffgGOdZ4Zx/K5uLr6ru3a/t4qq6taour6ovVNUeK+hrw6p6bb/266vqhqr6XX9+zW3GaHtkVZ1VVTf2255RVX+7svcyxjkfUFXH9d/jsqq6sqp+UVXvr6r1RrWdXVUvqar/qaqlVXVzVZ1fVZ+qqt1HtV1QVW+vqt/3vx7XVtU3q+oxY9Rw51yhVbVvVZ1aVdeMviW6qv62qk6rqiX9Pn9XVW+sqnlj9PmIqvpqVV3Sf1+XVdWPq+qo1fhs5lbVm6rqgn4ff6qqfxnrfP32Y34vjKeW/vfl8/ov/zTwvblooM3p/W1zq+qf+5/tsqo6tr9/pbdi978mH66qP/c/v3Oq6h+qqka1W+ncraOvh/7185n+y88M1H7n128V18mjq+ob/a/5sqr6Q1W9o6oWjNF25DOYU1Wvr6rz+sdcXFX/VlVzx6oZAFj3GFkJAHTZzkl+muSPST6XZPMkz0zylap6TGvttH67Y5OcnuSoJBf2XyfJoskuqKoOSnJSkvWSfDXJ+UnukeRpSZ5UVQe21n4x0H6zJKcl2SvJ75N8OsmtSXZL8vx+X5f3226a5L+T7J3kF/22s5I8PskXqup+rbU3jqPGByT5SZKW5OQkf0qySZJ7JXlZkjcmua3fdm6SU5I8NsnFSb6Q5LokuyR5apIfJjlvoL7/SXLfJD9L8v4kWyZ5RpJvVdVLW2sfH6Ok/ZIc2e/r0/1jbu33+en+53BJkhOTLEny0CRvTfLoqnpsa+32gc/+1H59Jyf5c3rfE/fpv683j+OzqSRfTPJXSS5I7zbmuUlekOT+qzp+oJ/x1vLmJH+d3tf/A/33l4HnQScmeXCSryf5cpIrxlHK3CTfSbJperdjz01ySP9ceyQ5fLzvaQzH9uv8qyRfSXLWwL4lKzuwql6c5GNJbkzyn+m9lwOSvDbJwVX1l621sfr4QpJHpPcZXJfkiUlek2Tr9L5PAIB1nLASAOiyA5Ic3Vq7M4Sqqi8k+UaSV6cXAqa1dmx/31FJFrXWjl4TxfSDx+OS3JRk/9baOQP79kxvXsFPJdln4LCPpBdU/XuSw1tryweO2TjJ7IG2708vqHxta+2dA+3WTy+8en1Vfam1dtYqSn1ekvWT/HVr7StjvIebBjYdnV5Q+dUkf9NaWzbQdl56IeeIf0svqPxEkpe01lq
/3b8l+XmSD1bVN1tri0bV87h++7sEmf2Rgs9Pb07GZ7fWbh7Yd3R64fPh6QVvSfJ36YW3B7TWfjWqry1X+Gnc1d+mF779OMmBrbVb+scflV4AO17jqqW1dnR/FOJeSd4/xmczaOcke/ZvrR6v7dIL8/cc+doNvJeXVdUJrbWhpkZorR3bH5z5V0m+PHKdrUpV7Zzkg0luSLJva+3cgX0fTfLS9BbD+r9jHL5bkvu11q7pt39Dkl8leW5VHdlas2AWAKzj3AYOAHTZhUn+ZXBDa+2b6S3mse8U1PPc9EawHTUYVPbr+k2STybZu6rumyRVtXV6I0EXJ3nVYFDZP+aG/nyEqaotkjwnyc8Hg8p+u1vSG5FWSQ5djXpvHr2htXbtSB1VNTu9UYA3pxcmLhvVdllr7cp+27n9+m5IcuRIUNlvd1564dTc9D6j0c5awYjLf0xye5IXDAaVfW9NcnWSZ4/zfY034BsZnff6kaCyf/w1/XOuronUMtqbhjz2yMGv3aj3MhWjEZ+T3vfChweDyr43JLk+yf9ZwW33rx0JKpOktXZjkv+X3t8tD1pD9QIAHWJkJQDQZWe11u4YY/vF6d1avLaNnHOvFcz79xf95/skOSe9W3pnJfl+P3RZmQenN8pyRXMKjswzeZ9x1HlCekHgl6vqS+ndJvw/rbULRrW7d5IFSX7SWrt0FX3ukWTDfj/XjLH/v9O7vXzvMfb9dPSGqtowvdGGVyU5YtT0iiOW5a7v9/+ld7v9T6rqhPRG1v5Pa+2SVdQ+aJ8ky9O7JX2001ejn8moZbS7fU7jcHuSH42x/fT+81hfjzVtZGTxf4/e0Vq7tqp+mWT/9L7/fjWqyc/H6O/i/vNmk1YhANBZwkoAoMuWrGD77ZmaO0S26D//3Srabdx/3rT//OfV6PvB/ceq+l6h1tpPq+oR6Y1ie3qS/5MkVfX7JG9urR03RH0ji6IsXsH+ke2bjrFvrFt3N0tvpOhW6d3uvUqttZOqt7jSK9ObY/LFSVJVZ6Y3uvDb4+hmQZJrWmu3jbPONVnL0OcfcNUKAv2Rvu62mM1aMPT3ygrmsby9/zx7jH0AwDrGbeAAwEy2PCv+x9tNx9i2tP+8V2utVvL4bL/dkv7zDuOoZaTv962i7wPH88Zaa2e01p6cXij4l+ndFrxNegv1jKzcPUx9265g/3aj2t2lnJX098tVvN+7DLlsrZ3aWntUeu/r0Unel+R+SU4Zuf1+HO9j8xq1Inrfit7bmCahltH9jfU5rcqW/dv5Rxt5L4Nfj5FpCFbne34YE/leAQBmOGElADCTXZtkmxUEV2PNj/fj/vMjxtn/T9MLiPavqo3G2Xa8fY9Lf97JH7XW/jnJP/Q3/1X/+dz0AssHVNX2q+jq9+ktzLNXf1Xw0UZC1F+MsW+sum5I8tsk96uqzcdzzKjjb2yt/Xdr7RVJ3pbeHIlPGMehv0jvd+CHj7HvgNWtY5y1jIx8XBMjA+ckedgY2w/oP/9yYNu1/ecdRzeuqntl7FGYw9Q+cs4DRu/of+88MMktSX63Gn0CADOEsBIAmMl+ml7Yc5dFSPqrVP/lGO0/k164d1RV3W2Bn6qaVVUHjLzuL05zfHojyd5dVbNGtd+4qhb0216R3jyID6qqN401Wq6qdquqXVf1pqrqYVW1wRi7tuk/39Q/5x1JPppkgyT/PnrBk6qaW1Vb9dve2q9vfkYtRFNVu6UXhN6W5HOrqm/Ae9ML9j49VgBaVZtV1T4Dr/evqrFGBd7lfa3CZ/rP/9pfZX2k783Tm3NzXFazlqv7zzuNt//V9PbBr92o9/KZgXbnJrkuyV/1F38aab9BegskjWWY2j+f3vfC3/dD0EFvTW+F+c+PXtAJACAxZyUAMLN9KL2g8mNV9ej0FvJ4YHoL6ZyS5MmDjVtrV1fV05P8V5IfV9V30xsd2NIbrbZfenNPrj9w2MuT7JnkJUkOqKpvJrk1ya5JHp/kKfnfxVBenmT3JG9
Jb7XkHya5PMn26S008+Akf5vkT6t4X69J8qiq+kG/7Q3p3Z78hPRG131ioO2bkzwkycFJ/lBVp6S3WvOOSR6X5NVJju23fV16Iz9fXlUPTm9RmS2TPCO9EPPlrbVV1Xan1tqnq2pheiuSX9D/bC5Ksnn/89k/vbDtJf1DPphkh6r6nySL0vscFyZ5VHorxx8/jtMel94K7U9J8puq+kp6ixc9PcnPkuw2zvJXp5bvpvc5frKqTkzv813SWvvwOM+1MouTzEvvvZyc/30v2yX5aGvt+yMNW2u3VdUHkrwpyS+r6r/S+3vgsUku7T9GOyO94PWI/or1I3NhfmhkJfvRWmuLquqIJB9J8ouq+mKSK5M8Mr1r5Nz0VrcHALgbYSUAMGO11s7pz9/4tvTCutuT/CC9QOVpGRVW9o/5blU9IMmr0gsbH5FeUHVpeqsfnziq/bVV9bAkR6QXkv3f9G6tvTjJp9NbNXyk7XVV9ch+m0OTHJJe8Hl5kvOS/FOS8Szc8tH0QsmHpHe785wkl/S3v6e1duHAOW+tqoPSCwSfm+R56S18c2l6oewPB9peU1X7JTmy//m8IsnN6Y1QfVdr7VvjqO0uWmuHV9XX++d/THrzJl6TXmj5rvRG6Y14W5KnpneL/mPSu23+ov7297fWrs0qtNZaVf1NesHrYekFxIvTC0Xfkt7tyeMx7lpaa9+sqlemtzDTEemNJr0wyWSElbf2z/+2JM9KLzz+Y5J3pBfGj3ZUeuHj36X3fXZZesHq0Rn4Xhyo/dqqOqR/3GFJRqYz+HxWMudka+2jVXV+etfJIemtJH9xel/Tt61gIR0AgNRw83gDAAAAAEwuc1YCAAAAAJ0grAQAAAAAOkFYCQAAAAB0grASAAAAAOgEYSUAAAAA0AnCSgAAAACgE+ZMdQFrW1VVku2TXD/VtQAAAADANDU/yaWttTaZnc64sDK9oPKSqS4CAAAAAKa5eyT582R2OBPDypERlfeI0ZUAAAAAsLrmpzcYcNKztZkYVo64vrV23VQXAQAAAADTSW+WxTXDAjsAAAAAQCcIKwEAAACAThBWAgAAAACdIKwEAAAAADphJi+wAwAAADCjnHnmmfOTbBcD2Li75UmuTnLtwoULl09VEdVam6pz301VvTTJS5Ps0t/02yRvaa19vb//9CSPHHXYx1trL1mNc2ySZGmSBVYDBwAAAGaCM888c1aS18+ePfu5VbVekjW3nDPTVmvt9tbaZcuXL393kpNXFFquyXytayMrL0nyuiTnpXfRPC/JV6pq79bab/ttPpnknweOuWntlggAAAAw7bx+vfXWe+m2225760YbbXRTVXVn9Bqd0FrL7bffPmfp0qW7Llmy5EO33Xbb/ZL869quo1MjK8dSVdckeXVr7Zj+yMqzWmtHTKA/IysBAACAGePMM8/cZPbs2T/ffvvt19t6662vnup66L7LL798i8WLF994xx13PGzhwoV3y8/WZL7W2fkJqmp2VT0ryUZJzhjY9eyquqqqflNVb6+qDVfRz7yq2mTkkWT+mqwbAAAAoGO2rar1NtpoI3enMi4bb7zxjVW1cZJt1/a5u3YbeKrq/umFk+snuSHJU1tr5/R3fyHJhUkuTfKAJP+WZI8kT1tJl0cmOWqNFQwAAADQbbOSlFu/Ga+qO6c0XesDHTsXVib5fZIHJlmQ5OlJPltVj2ytndNa+8RAu7OranGS71bVbq21C1bQ39uTvHfg9fz05sYEAAAAADqkc2Fla+3WJOf3X55ZVQ9O8o9JXjxG85/0n++VZMywsrW2LMmykdcDyTAAAAAA0CGdnbNywKwk81aw74H958VrpxQAAAAAWHP23XffPfbdd989prqOqdKpkZVV9fYkX09yUXq3ax+a5IAkj6+q3fqvv5bk6vTmrHxfku+31n49JQUDAAAAAJOmU2Flkq2T/EeS7dJb/vzXSR7fWvt2Ve2Y5DFJjkhvhfCLk5yY5F+mplQAAACA6W2
X1526cKprSJJF73jSmVNdA93QqbCytfbCley7OMkj12I5AAAAAMxgy5cvz0033VQbb7yxldTXkukwZyUAAAAA3M0rXvGK7atq4W9+85t5hxxyyC7z589/4Pz58x/49Kc/fZfrr7/+ztzrtttuy6tf/ertdtxxxz3nzp27zw477HD/l7/85TvcfPPNd1mJeYcddrj/gQceeK8TTzxxkz333PM+G2ywwT7vfe97tzrllFPmV9XCT33qU5u98pWv3G7rrbd+wEYbbbT3QQcddM+rr7569s0331wveMELdtx888332nDDDfd++tOfvsvovj/wgQ9s8dCHPvQvNt98873mzp27z2677Xa/f/u3f9tqbX1W00WnRlYCAAAAwOp6xjOecc8dd9zx1je+8Y1//uUvf7nhCSecsOVWW21128c+9rE/J8mznvWsXU466aQtDjrooGsPP/zwy3/6059u9JGPfGTb3//+9+t/+9vfvmCwrz/+8Y/rv+AFL7jnc57znCuf97znXXmf+9xn2ci+97znPdutv/76y//xH//xsvPPP3/eZz/72a0PO+ywVlVZunTp7Ne85jWX/uQnP9noxBNP3GKXXXZZ9u53v/vORaE/9alPbb3HHnvc/MQnPnHJnDlz2te+9rVNX/e61+20fPnyHHnkkVeuvU+r24SVAAAAAExre+65501f/OIXLxx5fc0118w5/vjjt/zYxz725zPOOGODk046aYtnPvOZVx1//PEjba588YtffPsnPvGJbb761a/OP/jgg68fOfaiiy6a96Uvfem8Qw455LqRbaeccsr8JLnjjjvy4x//+Pfz5s1rSXLVVVfNOfXUUzd/xCMesfR73/ve+SN977333usfd9xxWw6GlWeccca5g7eTv/71r7/yEY94xO4f/ehHtxFW/i+3gQMAAAAwrR1++OF3Cfv+8i//8volS5bMueaaa2adfPLJC5LkNa95zeWDbd7whjdcliRf/epXFwxu32GHHW4dDCoHPfOZz7x6JKhMkn333ffG1loOO+ywqwfb7bPPPjdedtllc2+77bY7tw0GlVdfffXsxYsXz3n4wx9+/SWXXDLv6quvnr3ab3odZWQlAAAAANPaPe95z1sHX2+22WZ3JL2RjxdeeOHcWbNm5X73u9+ywTY77bTT7fPnz7/j4osvnju4fccdd7xLu1HH3OU8CxYsuCNJdt5557ttX758ea6++urZ22677R1J8q1vfWujo48+eodf/vKXG91yyy13GUB4zTXXzN5iiy3uGP87XncJKwEAAACY1ubMGTviau1/F/GeNWvWuFb0Xn/99Zev7nnmzJkzZt+ttUqS3/72t/MOPvjgPXbddddb3vKWt1y800473TZv3rzlp5xyyoJjjjlmm+XLV3jKGUdYCQAAAMA6a+edd751+fLlOfvss9ffZ599bhnZfvHFF8+5/vrrZ++44463ruz4yXDiiScuuPXWW+urX/3q+bvvvvud5/vud7+7yZo+93RjzkoAAAAA1llPecpTlibJu9/97m0Gt7/tbW/bJkkOPvjgpWu6htmze1NSDo70vPrqq2efcMIJW6zpc083RlYCAAAAsM7ab7/9bn7a05529XHHHbfl0qVLZz/iEY+4/mc/+9lGJ5100haPecxjlgyuBL6mPPnJT1765je/+R5PfvKT7/X85z//yhtuuGH25z73uS0333zz26+88sr11vT5pxMjKwEAAABYpx1//PGLXvnKV176q1/9aqM3velNO/7oRz+af/jhh1928skn/3FtnH+vvfZaduyxx15QVXnzm9+847HHHrvVc5/73Ctf+tKXXr7qo2eWGhx+OhNU1SZJliZZ0Fobcxl6AABg3bHL605dp/7oWfSOJ9VU1wBML2eeeea958yZ843dd9/9hg033PCWVR/BTHfTTTetf9555218++23H7Rw4cJzR+9fk/makZUAAAAAQCcIKwEAAACAThBWAgAAAACdIKwEAAAAADp
BWAkAAAAAdIKwEgAAAADoBGElAAAAANAJwkoAAAAAoBOElQAAAABAJwgrAQAAAIBOEFYCAAAAAJ0grAQAAAAAOkFYCQAAAAAzRFUtfMUrXrH9VNexInOmugAAAAAApsjRCxZOdQlJkqOXnjnVJXTJv//7v29+xRVXzPnnf/7nK6a6lrXNyEoAAAAA6JATTjhh849//OPbTHUdU0FYCQAAAABjuOmmm+qOO+4Yc991110nV1sDfKgAAAAATGt/+tOf1nvGM56x89Zbb/2AuXPn7rPDDjvc/9nPfvZOt9xySyXJOeecM/cJT3jCPRcsWPDADTbYYO+99trr3scff/yCwT5OOeWU+VW18BOf+MRm//AP/7D91ltv/YCNN954n2uvvXb2IYccssuGG264929/+9t5j3zkI++10UYb7f20pz1t1yTZYYcd7n/IIYfsMrqmfffdd4999913j9H9f/KTn9zs5S9/+Q5bbrnlXhtssMHej3rUo+51/vnnrzd43Omnn77g0ksvnVtVC6tq4Q477HD/kf0333xz/dM//dP2O+20055z587dZ9ttt33AS17yknvcfPPNNXj+m2++uV74whfuuNlmm+210UYb7f2oRz3qXhdccMF66ThzVgIAAAAwbS1atGi9hzzkIfe5/vrrZx966KFX3fve9775z3/+89yvfvWrm91www2zrrzyytp///3vc8stt8x6wQtecPkWW2xx+3HHHbfls5/97HvdeuutFzz3uc9dMtjfO9/5zu3XW2+9dvjhh1+2bNmyWfPmzWtJcscdd9QTnvCE3R/84AffcPTRR1+84YYbLh+m3ne9613bVVX+/u//fvEVV1yx3jHHHLPNYx7zmD1+85vf/HbjjTduRx555OLXv/7197jsssvW+9d//deLk2T+/PnL+zXksY997L3OPPPMjQ899NCr7nOf+9x89tlnb/CpT31q6/PPP3/ed77znQtGzvO3f/u3u3zlK1/Z/OCDD75mv/32u+H000/f5IlPfOLuQ3/Qa4mwEgAAAIBp6xWveMUOV1999XqnnXba7/bff/+bRra///3vv3T58uX5u7/7ux2vvvrqOd/4xjd+//jHP/6GJDniiCOuut/97ne/I488csdnP/vZS2bPnn1nf8uWLauzzjrrnI033rgNnufWW2+tgw8++NqPfOQjf55IvUuXLp1z7rnn/mazzTZbniQLFy686YUvfOE93//+92/1xje+8YqnPvWp133wgx+89brrrpv9spe97JrBYz/+8Y9vfsYZZ2zyta997c73kiR77rnnza95zWt2/va3v73RYx/72BvPOOOMDb7yla9s/pznPOfKz33ucxclyZFHHnnlU57ylF3/8Ic/bDCR+tc0t4EDAAAAMC3dcccd+fa3v73pgQceuGQwqBwxa9asfPe7311w//vf/8bBcG/BggXLn/vc51556aWXzv3FL36x/uAxz3zmM68eHVSOOOKII66caM1/8zd/c/VIUJkkhx122LVbbbXVbd/85jcXrOy4JDnxxBM3u+c973nLAx7wgFsWL148Z+Rx0EEHXZ8k3/nOd+Ynycknn7wgSV71qlddPnj8K1/5ysvv3mu3GFkJAAAAwLR06aWXzrnhhhtm3/e+9715RW0WL148d++9975h9Pb73ve+tyTJBRdcMO/BD37wLSPbd91112Vj9TN79ux2z3ve89aJ1rz77rvfMvh61qxZ2WmnnZZdcsklc1d17KJFi9b/4x//uP7222+/11j7r7jiivWS5MILL5w7a9as3Pe+973Le7n//e9/y1jHdYmwEgAAAAD6NtxwwzFHVc6dO7cN3i6+KnfccUdWp/14LF++PLvvvvvN73znOy8ea/+uu+464TB1qgkrAQAAAJiWtt9++9s33njjO84555wVzsO43Xbb3XrBBResP3r77373u/WTZLfddhtzJOV4LViw4PalS5feLZW89NJL5+644453Cw/PO++8u9SyfPnyXHT
RRfP22GOPO0eHVtXow5IkO++887Lf/e53Gz7lKU+5ftasFc/uuPPOO9+6fPnynHPOOfP22muvO9/f2WeffbfPoWvMWQkAAADAtDR79uw89rGPXXLaaadt+v3vf3/D0fuXL1+eRz/60UvPPvvsjb7zne9sNLL9uuuum/Uf//EfW26//fa37rPPPhO6NXrnnXdedtZZZ218yy233JkwHnfccQsuu+yyMW/r/s///M8trr322jszuWOPPXazK6+8cr3HPe5xS0e2bbjhhsuvv/76uwWghxxyyLVXXHHFeu9973u3HL3vhhtuqOuuu25Wkhx88MFLk+Td7373NoNt3vOe92wz+riuMbISAAAAgGnrPe95z59/8IMfbPL4xz9+j0MPPfSq+9znPjcvXrx4vZNPPnnzM84449yjjz568Ve+8pXNn/rUp+7+whe+8IrNN9/89uOOO27LP//5z/OOPfbYCyZ6q/aLXvSiq77xjW9sdsABB+z+tKc97doLLrhg3kknnbT5jjvuOOaIzQULFtz+0Ic+9N7Pfvazr7r88svXO+aYY7bZaaedlh1xxBFXjbTZe++9bzz11FM3e9GLXnSPBz/4wTfNnz//jkMPPXTpy172sqtPPPHEzV7zmtfs/L3vfW/+fvvtd8Mdd9xR55577vqnnnrq5ieffPIf9t9//5se9rCH3fzkJz/5ms9//vNbXXfddbP322+/G0477bRNFi1aNG9Cb3YtEFYCAAAAzFRHLz1zqkuYqF133fW2H/3oR+e+9rWv3f6//uu/Nv/85z8/e+utt771wAMPvG7jjTdevuWWW7bvf//7v3vlK195j09/+tNb33rrrbP+4i/+4qYvfOEL5z/rWc9auuozrNwhhxxy3VFHHXXJxz72sW3e9KY37bjnnnveeNJJJ53/yle+csex2r/qVa9a/Otf/3rDD3zgA9vddNNNs/bbb7/rPvGJT1w0f/78O1cIf/WrX33lr371qw2/+MUvbnnMMcfM3n777W899NBDz549e3a++c1vXvDWt7516xNOOGHLb33rW5utv/76y3fcccdlL3rRiy7fc8897xwlesIJJyw6/PDDb//yl7+8+be//e1NH/rQh17/ta997bx73eteD5joe16TqrUx5wxdZ1XVJkmWJlnQWrtuqusBAADWrF1ed+o69UfPonc8aeyJzABW4Mwzz7z3nDlzvrH77rvfsOGGG3Z+Neh11SmnnDL/4IMP/otPf/rTf3z+859/7VTXszI33XTT+uedd97Gt99++0ELFy48d/T+NZmvmbMSAAAAAOgEYSUAAAAA0AnCSgAAAACgEyywAwAAAABr2JOf/OTrW2vTfkGjNc3ISgAAAACgE4SVAAAAAOu25UnSWpvqOpgmBr5Xlq/tcwsrAQAAANZtV7fWbr/99ttNB8i43Hbbbeu11m5PsmRtn1tYCQAAALBuu7a1dtnSpUvnT3UhdF9rLUuWLFmwfPnysxYuXHjF2j6/RB0AAABgHbZw4cLlZ5555ruXLFnyoXnz5m2x8cYb31hVU10WHdNay2233bbekiVLFixZsuT61trHp6IOYSUAAADAuu/k22677X6LFy/+26raeKqLoZtaa7cvX778h621jy9cuPDbU1GDsBIAAABgHbdw4cLlSf71zDPP/FCSbWNqQO5ueZIlU3Hr9yBhJQAAAMAMsXDhwuuSXDfVdcCKSNEBAAAAgE4QVgIAAAAAnSCsBAAAAAA6QVgJAAAAAHSCsBIAAAAA6ARhJQAAAADQCcJKAAAAAKAThJUAAAAAQCcIKwEAAACAThBWAgAAAACdIKwEAAAAADpBWAkAAAAAdIKwEgAAAADoBGElAAAAANAJwkoAAAAAoBOElQAAAABAJwgrAQAAAIBOEFYCAAAAAJ0grAQAAAAAOkFYCQAAAAB0grASAAAAAOgEYSUAAAAA0AnCSgAAAACgE4SVAAAAAEAnCCsBAAAAgE7oVFhZVS+tql9X1XX9xxl
V9YSB/etX1Ueq6uqquqGqTqyqbaayZgAAAABgcnQqrExySZLXJVmY5EFJ/jvJV6rqfv3970tycJK/SfLIJNsnOWkK6gQAAAAAJtmcqS5gUGvtq6M2vaGqXprkoVV1SZIXJjm0tfbfSVJVz0/yu6p6aGvtx2u5XAAAAABgEnVtZOWdqmp2VT0ryUZJzkhvtOV6Sb4z0qa1dm6Si5Lst5J+5lXVJiOPJPPXbOUAAAAAwDA6F1ZW1f2r6oYky5L8e5KnttbOSbJtkltba0tGHXJ5f9+KHJlk6cDjkkkvGgAAAACYsM6FlUl+n+SBSR6S5GNJPltV951Af29PsmDgcY+JFggAAAAATL5OzVmZJK21W5Oc3395ZlU9OMk/Jjkhydyq2nTU6Mptkly2kv6WpTdKM0lSVZNeMwAAAAAwcV0cWTnarCTzkpyZ5LYkjx7ZUVV7JNkpvTktAQAAAIBprFMjK6vq7Um+nt6iOfOTHJrkgCSPb60trapjkry3qq5Jcl2SDyU5w0rgAAAAADD9dSqsTLJ1kv9Isl16i+H8Or2g8tv9/f+UZHmSE9MbbfnNJC+bgjoBAAAAgEnWqbCytfbCVey/Jcnh/QcAAAAAsA6ZDnNWAgAAAAAzgLASAAAAAOgEYSUAAAAA0AnCSgAAAACgE4SVAAAAAEAnCCsBAAAAgE4QVgIAAAAAnSCsBAAAAAA6QVgJAAAAAHSCsBIAAAAA6ARhJQAAAADQCcJKAAAAAKAThJUAAAAAQCcIKwEAAACAThBWAgAAAACdIKwEAAAAADpBWAkAAAAAdIKwEgAAAADoBGElAAAAANAJwkoAAAAAoBOElQAAAABAJwgrAQAAAIBOEFYCAAAAAJ0grAQAAAAAOkFYCQAAAAB0grASAAAAAOgEYSUAAAAA0AnCSgAAAACgE4SVAAAAAEAnCCsBAAAAgE4QVgIAAAAAnSCsBAAAAAA6QVgJAAAAAHSCsBIAAAAA6ARhJQAAAADQCcJKAAAAAKAThJUAAAAAQCcIKwEAAACAThBWAgAAAACdIKwEAAAAADpBWAkAAAAAdIKwEgAAAADoBGElAAAAANAJwkoAAAAAoBOElQAAAABAJwgrAQAAAIBOEFYCAAAAAJ0grAQAAAAAOkFYCQAAAAB0grASAAAAAOgEYSUAAAAA0AnCSgAAAACgE4SVAAAAAEAnCCsBAAAAgE4QVgIAAAAAnSCsBAAAAAA6QVgJAAAAAHSCsBIAAAAA6ARhJQAAAADQCcJKAAAAAKAThJUAAAAAQCcIKwEAAACAThBWAgAAAACdIKwEAAAAADpBWAkAAAAAdIKwEgAAAADoBGElAAAAANAJwkoAAAAAoBOElQAAAABAJwgrAQAAAIBOEFYCAAAAAJ0grAQAAAAAOkFYCQAAAAB0grASAAAAAOgEYSUAAAAA0AmdCiur6siq+llVXV9VV1TVl6tqj1FtTq+qNurx71NVMwAAAAAwOToVViZ5ZJKPJHlokscmWS/Jt6pqo1HtPplku4HHa9ZmkQAAAADA5Jsz1QUMaq0dNPi6qg5LckWShUm+P7DrptbaZWuxNAAAAABgDevayMrRFvSfrxm1/dlVdVVV/aaq3l5VG66og6qaV1WbjDySzF9j1QIAAAAAQ+vUyMpBVTUryfuT/E9r7TcDu76Q5MIklyZ5QJJ/S7JHkqetoKsjkxy15ioFAIB1yy6vO7VNdQ0AwMzU2bAyvbkr90zy8MGNrbVPDLw8u6oWJ/luVe3WWrtgjH7enuS9A6/nJ7lksosFAAAAACamk2FlVX04yZOT7N9aW1Ww+JP+872S3C2sbK0tS7JsoO/JKhMAAAAAmESdCiurlyR+KMlTkxzQWvvTOA57YP958ZqqCwAAAABY8zoVVqZ36/ehSf4qyfVVtW1/+9LW2s1VtVt//9eSXJ3enJXvS/L91tqvp6JgAAAAAGBydC2sfGn/+fRR25+f5NgktyZ5TJIjkmyU5OIkJyb5l7VSHQA
AAACwxnQqrGytrXRCydbaxUkeuZbKAQAAAADWollTXQAAAAAAQCKsBAAAAAA6QlgJAAAAAHSCsBIAAAAA6ARhJQAAAADQCcJKAAAAAKAThJUAAAAAQCcIKwEAAACAThBWAgAAAACdIKwEAAAAADpBWAkAAAAAdIKwEgAAAADoBGElAAAAANAJwkoAAAAAoBOElQAAAABAJwgrAQAAAIBOEFYCAAAAAJ0grAQAAAAAOkFYCQAAAAB0grASAAAAAOgEYSUAAAAA0AnCSgAAAACgE4SVAAAAAEAnCCsBAAAAgE4QVgIAAAAAnSCsBAAAAAA6QVgJAAAAAHSCsBIAAAAA6ARhJQAAAADQCcJKAAAAAKAThJUAAAAAQCcIKwEAAACAThBWAgAAAACdIKwEAAAAADpBWAkAAAAAdIKwEgAAAADoBGElAAAAANAJwkoAAAAAoBOElQAAAABAJwgrAQAAAIBOEFYCAAAAAJ0grAQAAAAAOkFYCQAAAAB0grASAAAAAOgEYSUAAAAA0AnCSgAAAACgE4SVAAAAAEAnCCsBAAAAgE6YM1kdVdWGSZ6VZF6Sr7XWLpysvgEAAACAdd9QYWVVHZPkIa21Pfuv5yb5cZI9+02WVtWjWmu/nJwyAQAAAIB13bC3gR+Y5KSB14emF1Q+u/98WZKjJlYaAAAAADCTDBtWbptk0cDrv07y89baca21c5J8MslDJlYaAAAAADCTDBtW3phk0ySpqjlJDkjyzYH91ydZMJHCAAAAAICZZdgFdn6R5O+q6rQkT0kyP8lXB/bvluTyCdYGAAAAAMwgw4aVb0hvJOXPk1SSL7XWfjqw/6lJ/meCtQEAAAAAM8hQYWVr7edVde8kD0uypLX2vZF9VbVpko8mOX0yCgQAAAAAZoah5qysqv2TpLX2lcGgsr9tSZIvxJyVAAAAAMBqGHaBndOSPHYl+x/VbwMAAAAAMC7DhpW1iv3zktwxZN8AAAAAwAw07jkrq2qnJLsMbLr3yO3go2ya5MVJLpxQZQAAAADAjLI6C+w8P8lRSVr/8Yb+Y7RKb1TliydcHQAAAAAwY6xOWPnFJL9JL4z8YpIPJvnBqDYtyY1JzmqtXT4pFQIAAAAAM8K4w8rW2u+S/C5Jqur5Sb7fWvvTmioMAAAAAJhZVmdk5Z1aa5+d7EIAAAAAgJltqLAySarqPunNY3nPJJvl7iuEt9baoydQGwAAAAAwgwwVVlbV/0nymSS3Jfl9kmvHajaBugAAAACAGWbYkZVHJ/llkie01q6avHIAAAAAgJlq1pDHbZ/k04JKAAAAAGCyDBtW/jq9wBIAAAAAYFIMG1a+IskLq+phk1kMAAAAADBzDTtn5WuTLE3yg6o6J8lFSe4Y1aa11v5qIsUBAAAAADPHsGHlA5K09ELKjZPcd4w2bdiiAAAAAICZZ6iwsrW2yyTXAQAAAADMcMPOWQkAAAAAMKnGNbKyqnZKktbaRYOvV2WkPQAAAADAqoz3NvBFSVpVbdBau3Xk9TiOmz1kXQAAAADADDPesPIF6YWTt416Pamq6sgkT0ty7yQ3J/lRkte21n4/0Gb9JO9J8qwk85J8M8nLWmuXT3Y9AAAAAMDaM66wsrV27MpeT6JHJvlIkp+lV9vbknyrqu7bWrux3+Z9SZ6U5G+SLE3y4SQnJfnLNVQTAAAAALAWDLUa+GhVtUGStNZunkg/rbWDRvV7WJIrkixM8v2qWpDkhUkOba39d7/N85P8rqoe2lr78UTODwAAAABMnaFXA6+qnarqM1V1eZIbktxQVZdX1aeraudJqm9B//ma/vPCJOsl+c5Ig9bauUkuSrLfCuqcV1WbjDySzJ+k2gAAAACASTTUyMqquneSHybZNMm3k/yuv+veSZ6b5OCqevjgXJNDnGNWkvcn+Z/W2m/6m7dNcmtrbcmo5pf3943lyCRHDVsHAAAAALB2DHsb+DuSLE+yd2vt7MEdVbVnku/22zx1ArV9JMmeSR4
+gT6S5O1J3jvwen6SSybYJwAAAAAwyYa9DfyRST44OqhMkv4oyA8nOWDYoqrqw0menOTA1tpgsHhZkrlVtemoQ7bp77ub1tqy1tp1I48k1w9bFwAAAACw5gwbVq6XZGWL6dzUb7NaqufD6Y3IfFRr7U+jmpyZ5LYkjx44Zo8kOyU5Y3XPBwAAAAB0x7Bh5S+TvKi/Ovdd9BexeWGSXwzR70eSPCfJoUmur6pt+4+R1caXJjkmyXur6sCqWpjkM0nOsBI4AAAAAExvw85ZeVSSbyQ5t6o+k+QP/e17JHleki2SHD5Evy/tP58+avvzkxzb/+9/Sm++zBOTzEvyzSQvG+JcAAAAAECHDBVWttb+u6qemORdSV43avdZSf5Pa+20IfqtcbS5Jb0gdJgwFAAAAADoqGFHVqa19p0ke1fVtkl27m++sLU25kI3AAAAAAArM3RYOaIfTgooAQAAAIAJGXaBnVTVVlX17qo6p6pu6j/O6W/bZjKLBAAAAADWfUOFlVV1vyRnJ3lFkqVJ/rP/WNrf9uuq2nOyigQAAAAA1n3D3gb+kSSzkzyktfazwR1VtW+SryX5UJIDJ1YeAAAAADBTDHsb+L5JPjA6qEyS1tpPk3wgyUMmUhgAAAAAMLMMG1ZekeSWley/pd8GAAAAAGBchg0r35/kpVW17egdVbV9kpf22wAAAAAAjMuwc1bOSnJDkvOr6r+SnN/fvnuSv+6/nlVVrxg4prXW3jdsoQAAAADAum3YsPLdA//97DH2P2BUmyRpSYSVAAAAAMCYhg0rd53UKgAAABiXXV53apvqGibTonc8qaa6BgC6Y6iwsrV24WQXAgAAAADMbMMusAMAAAAAMKmElQAAAABAJwgrAQAAAIBOEFYCAAAAAJ0wrrCyqv6hqv5iTRcDAAAAAMxc4x1Z+b4kDxp5UVV3VNWha6YkAAAAAGAmGm9YeW2SbQZe1xqoBQAAAACYweaMs93pSY6uqgcmWdrf9tyqeuhKjmmttX+cQG0AAAAAwAwy3rDyZUnen+RxSbZO0vr//biVHNOSCCsBAAAAgHEZ123grbUrWmuHtta2a63NTu828Oe01mat5DF7zZYOAAAAAKxLxjtn5WjPT/KjySwEAAAAAJjZxnsb+F201j478t9Vdd8kO/dfXthaO2cyCgMAAAAAZpahwsokqaq/SvLeJLuM2v6nJK9orZ08sdIAAAAAgJlkqNvAq+qJSU7sv3x9kqf2H69Pbz7Lk6rqoEmpEAAAAACYEYYdWfmmJL9O8ojW2o0D20+uqg8n+WGSo5J8Y4L1AQAAAAAzxLAL7DwgyWdHBZVJkv62Y/ttAAAAAADGZdiw8pYkm69k/+b9NgAAAAAA4zJsWPnfSf6xqvYbvaOqHpLkH5J8ZyKFAQAAAAAzy7BzVr4myRlJflhVP03y+/72PZLsm+SKJK+deHkAAAAAwEwx1MjK1tqf0puT8oNJNkvyzP5jsyQfSLJXa23RJNUIAAAAAMwAw46sTGvtiiT/1H8AAAAAAEzIsHNWAgAAAABMKmElAAAAANAJwkoAAAAAoBOElQAAAABAJwgrAQAAAIBOWO2wsqo2rKozq+ola6IgAAAAAGBmWu2wsrV2U5Jdk7TJLwcAAAAAmKmGvQ38G0keP5mFAAAAAAAz27Bh5VuT/EVVfa6qHl5VO1TV5qMfk1koAAAAALBumzPkcb/tP983yaEraTd7yP4BAAAAgBlm2LDyLTFnJQAAAAAwiYYKK1trR09yHQAAAADADDfsnJV3UVULqsot3wAAAADA0IYOK6vqQVX1jaq6KcnVSR7Z375lVX2lqg6YnBIBAAAAgJlgqLCyqh6W5IdJdk/y+cF+WmtXJVmQ5MWTUSAAAAAAMDMMO7LybUl+l95q4K8fY/9pSR4ybFEAAAAAwMwzbFj54CSfaa0ty9irgv85ybZDVwUAAAAAzDjDhpW3reLYHZLcMGTfAAAAAMAMNGxY+eMkTx9rR1VtlOT
5Sb43bFEAAAAAwMwzbFh5VJIHVdWpSZ7Q37ZXVb0oyZlJtkry1kmoDwAAAACYIYYKK1trP0nyxCT3SvIf/c3vSfKJJLOTPLG19utJqRAAAAAAmBHmDHtga+2/k+xRVXunF1rOSnJBkjNba2MtugMAAAAAsEJDh5UjWmu/TPLLSagFAAAAAJjBhg4rq2pekr9L73bwXfqbFyX5WpJPtdZumWhxAAAAAMDMMdSclVV1jyRnJflgkr2SXNl/7NXfdla/DQAAAADAuAy7GvhHkuyc5BmttR1aa4/sP3ZI8swkO/XbAAAAAACMy7C3gT86yftaa18avaO19p9VtU+Sv59QZQAAAADAjDLsyMrrk1yxkv2X9dsAAAAAAIzLsGHlZ5IcVlUbjt5RVRsneX6SYyZSGAAAAAAws4zrNvCqetqoTb9M8qQk51bVZ5Oc39++e5LnJrkmya8nq0gAAAAAYN033jkrv5SkJan+68H/fsMY7e+R5LgkX5xQdQAAAADAjDHesPLANVoFAAAAADDjjSusbK19b00XAgAAAADMbMMusAMAAAAAMKnGexv43VTVw5O8IMk9k2yW/53DckRrre01gdoAAAAAgBlkqLCyql6R5F1Jbkny+/RW/wYAAAAAGNqwIytfneR/khzcWls6ifUAAAAAADPUsHNWbpjk/wkqAQAAAIDJMmxYeVqS+09mIQAAAADAzDZsWPn3SR5dVa+qqs0nsyAAAAAAYGYaKqxsrV2c5ONJ3pHkyqq6saquG/VwizgAAAAAMG7Drgb+liRvSPLnJD9PIpgEAAAAACZk2NXAX5Lk1CR/3VpbPlnFVNX+6a00vjDJdkme2lr78sD+Y5M8b9Rh32ytHTRZNQAAAAAAU2PYOSvnJjl1MoPKvo2S/CrJ4Stp8430gsyRx99Ocg0AAAAAwBQYdmTlKUkekd68lZOmtfb1JF9PkqpaUbNlrbXLJvO8AAAAAMDUG3Zk5ZuT3LeqPlpVC6tqq6rafPRjMgsdcEBVXVFVv6+qj1XVFitrXFXzqmqTkUeS+WuoLgAAAABgAoYdWfn7/vMDk7x4Je1mD9n/inwjyUlJ/pRktyRvS/L1qtqvtXbHCo45MslRk1wHAAAAADDJhg0r35KkTWYh49FaO37g5dlV9eskFyQ5IMl3V3DY25O8d+D1/CSXrJECAQAAAIChDRVWttaOnuQ6htJa+2NVXZXkXllBWNlaW5Zk2cjrlcyFCQAAAABMoWHnrOyEqrpHki2SLJ7qWgAAAACAiRlqZGVV/fM4mrXW2ltXs9+N0xslOWLXqnpgkmv6j6OSnJjksvTmrHxnkvOTfHN1zgMAAAAAdM+wc1YevZJ9LUn1n1crrEzyoCSnDbwemWvys0lemuQBSZ6XZNMklyb5VpI39W/1BgAAAACmsWHnrLzb7eNVNSvJzkkOT7J/kicM0e/p6QWdK/L41e0TAAAAAJgeJm3Oytba8tban1prr0pyXpIPTVbfAAAAAMC6b00tsPP9JE9cQ30DAAAAAOugNRVWPijJ8jXUNwAAAACwDhp2NfDnrmDXpunNV/m0JJ8asiYAAAAAYAYadjXwY1ey76ok70jyliH7BgAAAABmoGHDyl3H2NaSXNtau34C9QAAAAAAM9RQYWVr7cLJLgQAAAAAmNmGHVl5p6raOMlmSWr0vtbaRRPtHwAAAACYGYZdYGf9JEcleWGSLVbSdPYw/QMAAAAAM8+wIys/muR5Sb6c5AdJrp2sggAAAACAmWnYsPJpST7VWnvxZBYDAAAAAMxcs4Y8riX5xWQWAgAAAADMbMOGlV9J8pjJLAQAAAAAmNmGDSvfmuSeVfWJqlpYVVtV1eajH5NZKAAAAACwbht2zsrz+s97p7ci+IpYDRwAAAAAGJdhw8q3pDdvJQAAAADApBgqrGytHT3JdQAAAAAAM9ywc1YCAAAAAEwqYSUAAAAA0AnCSgAAAACgE4SVAAAAAEAnCCsBAAAAgE4QVgIAAAA
AnSCsBAAAAAA6QVgJAAAAAHSCsBIAAAAA6ARhJQAAAADQCcJKAAAAAKAThJUAAAAAQCcIKwEAAACAThBWAgAAAACdIKwEAAAAADpBWAkAAAAAdIKwEgAAAADoBGElAAAAANAJwkoAAAAAoBOElQAAAABAJwgrAQAAAIBOEFYCAAAAAJ0grAQAAAAAOkFYCQAAAAB0grASAAAAAOgEYSUAAAAA0AnCSgAAAACgE4SVAAAAAEAnCCsBAAAAgE4QVgIAAAAAnSCsBAAAAAA6QVgJAAAAAHSCsBIAAAAA6ARhJQAAAADQCcJKAAAAAKAThJUAAAAAQCcIKwEAAACAThBWAgAAAACdIKwEAAAAADpBWAkAAAAAdIKwEgAAAADoBGElAAAAANAJwkoAAAAAoBOElQAAAABAJwgrAQAAAIBOEFYCAAAAAJ0grAQAAAAAOkFYCQAAAAB0grASAAAAAOgEYSUAAAAA0AnCSgAAAACgE4SVAAAAAEAnCCsBAAAAgE4QVgIAAAAAnSCsBAAAAAA6QVgJAAAAAHSCsBIAAAAA6ARhJQAAAADQCZ0KK6tq/6r6alVdWlWtqv561P6qqrdU1eKqurmqvlNVu09RuQAAAADAJOpUWJlkoyS/SnL4Cva/Jsk/JHlJkockuTHJN6tq/bVTHgAAAACwpsyZ6gIGtda+nuTrSVJVd9lXvQ1HJPmX1tpX+tuem+TyJH+d5Pi1WCoAAAAAMMm6NrJyZXZNsm2S74xsaK0tTfKTJPut6KCqmldVm4w8ksxf45UCAAAAAKttOoWV2/afLx+1/fKBfWM5MsnSgcclk18aAAAAADBR0ymsHNbbkywYeNxjassBAAAAAMbSqTkrV+Gy/vM2SRYPbN8myVkrOqi1tizJspHXo+fCBAAAAAC6YTqNrPxTeoHlo0c29OegfEiSM6aqKAAAAABgcnRqZGVVbZzkXgObdq2qBya5prV2UVW9P8kbq+q89MLLtya5NMmX13KpAAAAAMAk61RYmeRBSU4beP3e/vNnkxyW5J1JNkryiSSbJvlhkoNaa7esvRIBAAAAgDWhU2Fla+30JCucVLK11pL8c/8BAAAAAKxDptOclQAAAADAOkxYCQAAAAB0grASAAAAAOgEYSUAAAAA0AnCSgAAAACgE4SVAAAAAEAnCCsBAAAAgE4QVgIAAAAAnSCsBAAAAAA6QVgJAAAAAHSCsBIAAAAA6ARhJQAAAADQCcJKAAAAAKAThJUAAAAAQCcIKwEAAACAThBWAgAAAACdIKwEAAAAADpBWAkAAAAAdIKwEgAAAADoBGElAAAAANAJwkoAAAAAoBOElQAAAABAJwgrAQAAAIBOEFYCAAAAAJ0grAQAAAAAOkFYCQAAAAB0grASAAAAAOgEYSUAAAAA0AnCSgAAAACgE4SVAAAAAEAnCCsBAAAAgE4QVgIAAAAAnSCsBAAAAAA6QVgJAAAAAHSCsBIAAAAA6ARhJQAAAADQCcJKAAAAAKAThJUAAAAAQCcIKwEAAACAThBWAgAAAACdIKwEAAAAADpBWAkAAAAAdIKwEgAAAADoBGElAAAAANAJwkoAAAAAoBOElQAAAABAJwgrAQAAAIBOEFYCAAAAAJ0grAQAAAAAOkFYCQAAAAB0grASAAAAAOgEYSUAAAAA0AnCSgAAAACgE4SVAAAAAEAnCCsBAAAAgE4QVgIAAAAAnSCsBAAAAAA6QVgJAAAAAHSCsBIAAAAA6ARhJQAAAADQCcJKAAAAAKAThJUAAAAAQCfMmeoCAAAAmLl2ed2pbaprmEyL3vGkmuoaAKYzIysBAAAAgE4QVgIAAAAAnSCsBAAAAAA6QVgJAAAAAHSCsBIAAAAA6ARhJQAAAADQCcJKAAAAAKAThJUAAAAAQCcIKwEAAACAThBWAgAAAACdIKwEAAAAADpBWAkAAAAAdIKwEgAAAADohGkVVlbV0VXVRj3Oneq6AAAAAICJmzPVBQzht0keM/D69qk
qBAAAAACYPNMxrLy9tXbZeBtX1bwk8wY2zZ/8kgAAAACAiZqOYeXuVXVpkluSnJHkyNbaRStpf2SSo9ZKZV1x9II21SXQAUcvrakuAQAAZppdXnfqOvX32KJ3PMnfFcBaNa3mrEzykySHJTkoyUuT7JrkB1W1stGSb0+yYOBxjzVcIwAAAAAwhGk1srK19vWBl7+uqp8kuTDJM5Ics4JjliVZNvK6yj8KAQAAAEAXTbeRlXfRWluS5A9J7jXFpQAAAAAAEzStw8qq2jjJbkkWT3UtAAAAAMDETKuwsqreXVWPrKpdquphSf4ryR1Jjpvi0gAAAACACZpWc1amtzjOcUm2SHJlkh8meWhr7coprQoAAAAAmLBpFVa21p411TUAAAAAAGvGtLoNHAAAAABYdwkrAQAAAIBOEFYCAAAAAJ0grAQAAAAAOkFYCQAAAAB0grASAAAAAOgEYSUAAAAA0AnCSgAAAACgE4SVAAAAAEAnCCsBAAAAgE6YM9UFAADAdLfL605tU10DAMC6wMhKAAAAAKAThJUAAAAAQCcIKwEAAACAThBWAgAAAACdIKwEAAAAADpBWAkAAAAAdIKwEgAAAADoBGElAAAAANAJwkoAAAAAoBOElQAAAABAJwgrAQAAAIBOEFYCAAAAAJ0grAQAAAAAOkFYCQAAAAB0grASAAAAAOgEYSUAAAAA0AnCSgAAAACgE4SVAAAAAEAnCCsBAAAAgE4QVgIAAAAAnSCsBAAAAAA6QVgJAAAAAHSCsBIAAAAA6ARhJQAAAADQCcJKAAAAAKAThJUAAAAAQCcIKwEAAACAThBWAgAAAACdIKwEAAAAADpBWAkAAAAAdIKwEgAAAADoBGElAAAAANAJwkoAAAAAoBPmTHUBwBpy9II21SUAHXH00prqEgAAYFV2ed2p69TfsYve8SS/hw/ByEoAAAAAoBOElQAAAABAJwgrAQAAAIBOEFYCAAAAAJ0grAQAAAAAOkFYCQAAAAB0grASAAAAAOgEYSUAAAAA0AnCSgAAAACgE4SVAAAAAEAnCCsBAAAAgE4QVgIAAAAAnSCsBAAAAAA6QVgJAAAAAHSCsBIAAAAA6ARhJQAAAADQCXOmugAAANaCoxe0qS5hXbZo/amuYHx2ueULU10CwJTa5XWnrlM/Dxe940k11TXAZDOyEgAAAADoBGElAAAAANAJwkoAAAAAoBOElQAAAABAJwgrAQAAAIBOEFYCAAAAAJ0grAQAAAAAOkFYCQAAAAB0grASAAAAAOgEYSUAAAAA0AnCSgAAAACgE4SVAAAAAEAnCCsBAAAAgE6YlmFlVR1eVYuq6paq+klV7TvVNQEAAAAAEzPtwsqqemaS9yZ5c5J9kvwqyTerauspLQwAAAAAmJBpF1YmeUWST7bWPtNaOyfJS5LclOQFU1sWAAAAADARc6a6gNVRVXOTLEzy9pFtrbXlVfWdJPut4Jh5SeYNbJo/8lxVa6rUKbX0dfNX3QiAGWNB1SZTXQNTz+8HJMnyZTdNdQnANFPr2O8ROx7xxakuYVL5+nTbuvb1GWWN/XJZrbU11fekq6rtk/w5ycNaa2cMbH9nkke21h4yxjFHJzlqrRUJAAAAADPDPVprf57MDqfVyMohvT29OS4HbZ7kmimoZRjzk1yS5B5Jrp/iWoDxc+3C9OTahenJtQvTk2sXpqeRa/feSS6d7M6nW1h5VZI7kmwzavs2SS4b64DW2rIky0Ztvm7yS1szBm5Vv761Nm3qhpnOtQvTk2sXpifXLkxPrl2Yngau3cVtDdyyPa0W2Gmt3ZrkzCSPHtlWVbP6r89Y0XEAAAAAQPdNt5GVSe+W7s9W1c+T/DTJEUk2SvKZqSwKAAAAAJiYaRdWttZOqKqtkrwlybZJzkpyUGvt8iktbM1ZluTNufut7EC3uXZhenLtwvTk2oXpybUL09MavXan1WrgAAAAAMC6a1rNWQkAAAAArLuElQAAAABAJwgrAQAAAIB
OEFYCAAAAAJ0grOyYqtquqt5RVadV1fVV1arqgNU4/uj+MaMft6y5qoGJXrv9Pnaoqi9W1ZKquq6qvlJV91wzFQMjqmrTqvpEVV1ZVTf2r+N9xnnssSv4uXvumq4bZoKqmldV/1ZVl1bVzVX1k6p67DiP9XMVpsiw166/Z2FqVdXGVfXmqvpGVV3Tv/4OW43jh/69etCc1T2ANW6PJK9Ncl6Ss5PsN2Q/L01yw8DrOyZYF7ByE7p2q2rjJKclWZDkbUluS/JPSb5XVQ9srV09ueUCSVJVs5KcmmSvJO9KclWSlyU5vaoWttbOG0c3y5K8aNS2pZNaKMxcxyZ5epL3p/cz9rAkX6uqA1trP1zRQX6uwpQ7NkNcuwP8PQtTY8sk/5zkoiS/SnLAeA+cpN+rkwgru+jMJFu01q6pqqcn+c8h+/lSa+2qSawLWLmJXrsvS7J7kn1baz9Lkqr6epLfJHllktdPZrHAnZ6e5GFJ/qa19qUkqaovJvlDkjcnOXQcfdzeWvv8misRZqaq2jfJs5K8urX27v62/0jvZ+M707t2V8TPVZgiE7x2R/h7FqbG4iTbtdYuq6oHJfnZahw7Gb9XJ3EbeOe01q5vrV0zCV1VVW1SVTUJfQGrMAnX7tOT/GzkD6p+n+cm+W6SZ0y0PmCFnp7k8iQnjWxorV2Z5ItJ/qqq5o2nk6qaXVWbrJkSYcZ6enqjqT4xsqG1dkuSY5LsV1U7ruJYP1dhakzk2h3h71mYAq21Za21y4Y8fFJ+r06EleuyP6Z3C9r1VfX5qtpmqgsCxtYfLv+AJD8fY/dPk+xWVfPXblUwY+yd5BetteWjtv80yYZJ/mIcfWyY5LokS/tz+3ykfwsqMDF7J/lDa+26Udt/2n9+4FgH+bkKU26oa3cUf8/C9DMZv1cncRv4uujaJB9OckZ6c2g9IsnhSfatqgeN8QMDmHqbJ5mX3pD70Ua2bZ/k92utIpg5tkvy/TG2D157Z6/k+MXp3dL2i/T+Efig9G4/3auqDmit3T6JtcJMs11W/bNxLH6uwtQa9tpN/D0L09lEf6++k7ByDer/q+7ccTZf1lprEz1na+0DozadWFU/TfL/0vvj6R0TPQes66bg2t1gpK8x9t0yqg2wAkNeuxtkAtdea+3IUZuOr6o/JPnX9G6FOX6c9QB3N+z16ecqTK2hf7b6examtQn9Xj3IbeBr1v5Jbh7nY481VURr7QtJLkvymDV1DljHrO1r9+b+81hzeKw/qg2wYsNcuzdn8q+99yVZHj93YaKGvT79XIWpNak/W/09C9PGpF37RlauWecmef442441TH4yXZzeLTHAqq3ta/ea9P4Farsx9o1su3QSzgPrumGu3cWZ5GuvtXZzVV0dP3dhohYn2WGM7au6Pv1chak17LW7Mv6ehe6btN+rhZVrUH8FpWOnuo7+Cmq7JPnlFJcC08LavnZba8ur6uwkDxpj90OS/LG1dv3aqgemqyGv3bOSPKKqZo2aDPwhSW5K8ofVraO/cMeWSa5c3WOBuzgryYFVtcmoeeoeMrD/bvxchSl3Voa4dlfE37MwbZyVSfq92m3g01hV7VRV9x61basxmr40yVZJvrFWCgNWaqxrN8mXkjy4qh400G6PJI9K8p9rsz6YYb6UZJskTxvZUFVbJvmbJF9trS0b2L5bVe028Hr9Fawo/KYkFT93YaK+lGR2kv87sqGq5qU3gvonrbWL+9v8XIVuGfra9fcsTA9VtV1V3buq1hvYPO7fq1fZ/ySs6cIkq6o39v/zfkmeleTTSf6UJK21fxlod3qSR7bWamDbTUlOSG+FpVuSPLzfx6+S/GVr7aa18BZgRprgtTs/vX8tnp/k3UluS/KK9H7Re2BrzQgtWAOqanaSHybZM8m7klyV3gT+OyV5cGvt9wNtFyVJa22X/utd0rtuj0vvFvQkeXySJ6b3B9WTRv2rMrCaquqLSZ6a3lyw5yd5XpJ9kzy6tfb9fpvT4+c
qdMoErl1/z8IUq6qXJ9k0vdW7X5rkpPzvyOYPtdaWVtWx6V3Xu7bWFvWPG/fv1ausQVjZPVW1wi/KqP+Rn567/8/9k0kelmTH9CYxvTDJiUn+1e0usGZN5Nrtb79Her/QPS69ke+nJ/mn1tr5a6JeoKeqNkvvF6q/Tm+Vwp8leVVr7eej2i1K7hJWbprkQ0kemt4vc7PT+4Ps/yV5d2vttrVRP6zLqmr9JG9N8pwkmyX5dZI3tda+OdDm9Pi5Cp0y7LXr71mYev3feXdewe5dW2uLxgor+8eO6/fqVdYgrAQAAAAAusCclQAAAABAJwgrAQAAAIBOEFYCAAAAAJ0grAQAAAAAOkFYCQAAAAB0grASAAAAAOgEYSUAAAAA0AnCSgAAAACgE4SVAAAAAEAnCCsBAKaZqjqsqlpV7TLk8btX1beqamm/n7+eaJ8AADAZ5kx1AQAArHWfTbJrkjckWZLk50keM5UFAQBAIqwEAJiOPpfk+CTLVvfAqtogyX5J/rW19uGB7ZNXHQAADMlt4AAA00xr7Y7W2i2ttTbE4Vv1n5dMYkkMqKqNproGAIDpSlgJADDNjDW/ZFUtqqpTqurhVfXTqrqlqv5YVc8daHN0kgv7L9/V72PRSs7T+seM3r6oqo4dtW3Tqnp/VV1cVcuq6vyqem1VzRrVblZV/WNVnd2v8cqq+kZVPWhUu+dU1ZlVdXNVXVNVx1fVjuP4bOb361jUr+OKqvp2Ve0zqt1DquprVXVtVd1YVb+uqn8c1eZRVfWD/v4lVfWVqrrPqDZH9z+n+1bVF6rq2iQ/nOj7AACYqdwGDgCw7rhXki8lOSa9eSlfkOTYqjqztfbbJCelN6LyfUmOS/K1JDdM9KRVtWGS7yXZIcnHk1yU5GFJ3p5kuyRHDDQ/JslhSb6e5FPp/T76iCQPTW/uzFTVG5K8NckX+222SvL3Sb5fVXu31paspJx/T/L0JB9Ock6SLZI8PMl9kvyi3/9jk5ySZHGSDyS5rL//yf3XqarH9Gv8Y5Kjk2zQr+F/qmqf1tqiUef9zyTnJXl9kpqE9wEAMCMJKwEA1h17JNm/tfaDJKmqLya5OMnzk7yqtfbrqrouvbDyF621z0/SeV+RZLcke7fWzutv+3hVXZrk1VX1ntbaxVV1YHpB5Qdba4OjGN9T/Ukzq2rnJG9O8sbW2ttGGlTVSUl+meRlSd6WFXtSkk+21l45sO2dA/3MTi9QXZzkgYOB4UgNfe9Kck2S/Vpr1/T3f7lfw5uTPG/UeX/VWjt0oK+Jvg8AgBnJbeAAAOuOc0aCyiRprV2Z5PdJ7rmGz/s3SX6Q5Nqq2nLkkeQ7SWYn2b/f7pAkLb0Q7y4G5t98Wnq/o35xVF+XpTdy8cBV1LIkyUOqavsV7N87vZXQ3z96ZONIDVW1XZIHJjl2JKjs7/91km8neeIY/f77qNcTfR8AADOSkZUAAOuOi8bYdm2SzdbweXdP8oAkV65g/9b9592SXDoYAK6gr0ov0BvLbauo5TXp3QJ/cVWdmd6t7v/RWvvjQA1J8puV9LFz//n3Y+z7XZLHV9VGrbUbB7b/aVS7ib4PAIAZSVgJALDuuGMF22sF24c1e9TrWemNOHznGG2T5A+r0fes9EZfPiFjv5+VzrHZWvtiVf0gyVOTPC7Jq5O8tqqe1lr7+mrUsbpuHvV6Qu8DAGCmElYCALAi1ybZdHBDVc1Nb9GcQRck2bi19p1V9HdBeqMSN1/J6MoL0gtX/9RaW52Q806ttcVJPprko1W1dXoL67whvQVzLug32zO929THMrJi+h5j7Lt3kqtGjaocy4TfBwDATGTOSgAAVuSC/O98kyP+b+4+svKLSfarqseP7qCqNq2qkX8gPzG9AO+oMdqNjP48Kb2RiEeNWvAm1bPFioqtqtlVtWBwW2vtiiSXJpnX3/SL9G7ZPqKqNh2rhn7YeVaS5w22qao90xut+bUV1TBg6PcBADCTGVkJAMCKfCrJv1f
Viend5r1XkscnuWpUu3cleUqSU6rq2CRnJtkoyf2TPD3JLumNRjytqj6X5B+qavck30jvH88fkeS0JB9urV1QVW9M8vYku/RX4L4+vUVxnprkE0nevYJ65ye5pKq+lORX6d1q/ZgkD07yyiRprS2vqpcm+WqSs6rqM+mtDH7vJPfrv7+kd/v415OcUVXHJNkgyd8nWZrk6FV9cBN8HwAAM5awEgCAFflkeuHaC5MclN6K349N8t3BRq21m6rqkUlen97K4M9Ncl16c1UelV7AN+L5SX7d7/Nd/X0/T/Kjgf7eUVV/SPJP+d9RmBcn+VaSk1dS703p3f79uPzvatznJ3lZa+1jA/1/s6oO7Pf9yn67C/rvd6TNd6rqoPRWLn9LegvifC/Ja1troxfTGdME3gcAwIxVrbWprgEAAAAAwJyVAAAAAEA3CCsBAAAAgE4QVgIAAAAAnSCsBAAAAAA6QVgJAAAAAHSCsBIAAAAA6ARhJQAAAADQCcJKAAAAAKAThJUAAAAAQCcIKwEAAACAThBWAgAAAACdIKwEAAAAADrh/wMpJZfiX8/tNAAAAABJRU5ErkJggg==", + "image/png": "iVBORw0KGgoAAAANSUhEUgAABSsAAALGCAYAAACtT8T3AAAAOXRFWHRTb2Z0d2FyZQBNYXRwbG90bGliIHZlcnNpb24zLjcuNCwgaHR0cHM6Ly9tYXRwbG90bGliLm9yZy8WgzjOAAAACXBIWXMAAA9hAAAPYQGoP6dpAABRnElEQVR4nO3deZhkVX038O9vZphhG4Z9lU0kuKAIoyhGELeICyaKUYO+ikveqJjEiBsuAfWNGvfduKAYjSwRoiwKbuCS4IaiKKKAjqIMOzPswzLn/aOqSdH0zPRUd0/fmf58nqeeos4999SvqvtS1d8599xqrQUAAAAAYLrNmu4CAAAAAAASYSUAAAAA0BHCSgAAAACgE4SVAAAAAEAnCCsBAAAAgE4QVgIAAAAAnSCsBAAAAAA6QVgJAAAAAHSCsBIAAAAA6ARhJQCwzqiqTarqg1W1qKruqKpWVQ+uqgP7/330dNdIt/V/T84e1XZ0v/3AaarpsP7zHzaqfVFVLZqOmgZqmNb3BgBY9wgrAYBp1Q862iQN984kf5/k/CRvT/LmJJdP0tgwlKo6tv97vst017K6VhSUAgBMlTnTXQAAwCR6SpLftNYOHmysqvtOUz2sGz6c5Pgkf5im5/+vJN9Psniann9lpvu9AQDWMcJKAGBdsn2S70x3EaxbWmtXJ7l6Gp9/aZKl0/X8KzPd7w0AsO5xGjgA0DlVtUv/1NNj+/99fFVdXVW3VtWPq+opo/qf3T+VvJI8auTU8tFrD47xPCtc829la/FV1X37tV1aVbdV1RVV9YWq2mMFY21YVa/t135DVd1YVb/qr6+5zRh9j6yq86rqpn7fc6rqb1b2WsZ4zgdV1XH917isqq6qqp9U1furar1RfWdX1Uuq6r+ramlV3VJVF1fVp6pq91F9F1TV26vq1/2fx3VVdWZVPW6MGu5aK7Sq9q2q06vq2tGnRFfV31TVWVW1pD/mr6rqjVU1b4wx96+qU6vqj/3XdXlVfb+qjlqN92ZuVb2pqi7pj/G7qvp/Yz1fv/+YvwvjqaX/e/n8/sPfDfxuLhroc3a/bW5V/XP/vV1WVcf2t6/0VOz+z+TDVfWn/vt3QVX9Q1XVqH4rXbt19PHQP34+03/4mYHa7/r5reI4eWxVndH/mS+rqt9U1TuqasEYfUfegzlV9fqquqi/z6VV9a9VNXesmgGAdY+ZlQBAl+2c5IdJfpvkc0k2T/KsJF+uqse11s7q9zs2ydlJjkry+/7jJFk02QVV1UFJTk6yXpJTk1yc5F5Jnp7kyVX16NbaTwb6b5bkrCR7Jfl1kk8nuS3Jbkle0B/rin7fTZN8K8neSX7S7zsryROSfKGqHtBae+M4anxQkh8kaUlOSfK7JJskuU+SlyV5
Y5Lb+33nJjktyeOTXJrkC0muT7JLkqcl+V6Siwbq++8k90/yoyTvT7Jlkmcm+VpVvbS19vExStovyZH9sT7d3+e2/pif7r8Pf0xyUpIlSR6e5K1JHltVj2+t3THw3p/er++UJH9K73fifv3X9eZxvDeV5MQkf5nkkvROY56b5IVJHriq/QfGGW8tb07yV+n9/D/Qf30ZuB90UpKHJvlqki8luXIcpcxN8o0km6Z3OvbcJIf0n2uPJIeP9zWN4dh+nX+Z5MtJzhvYtmRlO1bV3yX5WJKbkvxneq/lwCSvTXJwVf15a22sMb6QZP/03oPrkzwpyWuSbJ3e7wkAsI4TVgIAXXZgkqNba3eFUFX1hSRnJHl1eiFgWmvH9rcdlWRRa+3oqSimHzwel+TmJAe01i4Y2LZneusKfirJPgO7fSS9oOrfkhzeWls+sM/GSWYP9H1/ekHla1tr7xzot3564dXrq+qLrbXzVlHq85Osn+SvWmtfHuM13DzQdHR6QeWpSf66tbZsoO+89ELOEf+aXlD5iSQvaa21fr9/TfLjJB+sqjNba4tG1fMX/f53CzL7MwVfkN6ajM9prd0ysO3o9MLnw9ML3pLkb9MLbw9srf1s1FhbrvDduLu/SS98+36SR7fWbu3vf1R6Aex4jauW1trR/VmIeyV5/xjvzaCdk+zZP7V6vLZLL8zfc+RnN/BaXlZVJ7TWhloaobV2bH9y5l8m+dLIcbYqVbVzkg8muTHJvq21Cwe2fTTJS9O7GNb/HWP33ZI8oLV2bb//G5L8LMnzqurI1poLZgHAOs5p4ABAl/0+yf8bbGitnZnexTz2nYZ6npfeDLajBoPKfl2/SPLJJHtX1f2TpKq2Tm8m6OIkrxoMKvv73NhfjzBVtUWS5yb58WBQ2e93a3oz0irJoatR7y2jG1pr143UUVWz05sFeEt6YeKyUX2Xtdau6ved26/vxiRHjgSV/X4XpRdOzU3vPRrtvBXMuPzHJHckeeFgUNn31iTXJHnOOF/XeAO+kdl5rx8JKvv7X9t/ztU1kVpGe9OQ+x45+LMb9VqmYzbic9P7XfjwYFDZ94YkNyT5Pys47f61I0FlkrTWbkryH+n93fKQKaoXAOgQMysBgC47r7V25xjtl6Z3avGaNvKce61g3b8/69/fL8kF6Z3SOyvJd/qhy8o8NL1ZlitaU3Bkncn7jaPOE9ILAr9UVV9M7zTh/26tXTKq332TLEjyg9baZasYc48kG/bHuXaM7d9K7/TyvcfY9sPRDVW1YXqzDa9O8opRyyuOWJa7v97/SO90+x9U1Qnpzaz979baH1dR+6B9kixP75T00c5ejXEmo5bR7vE+jcMdSf5njPaz+/dj/Tym2sjM4m+N3tBau66qfprkgPR+/342qsuPxxjv0v79ZpNWIQDQWcJKAKDLlqyg/Y5MzxkiW/Tv/3YV/Tbu32/av//Taoz90P5tVWOvUGvth1W1f3qz2J6R5P8kSVX9OsmbW2vHDVHfyEVRFq9g+0j7pmNsG+vU3c3Smym6VXqne69Sa+3k6l1c6Yj01pj8uySpqnPTm1349XEMsyDJta2128dZ51TWMvTzD7h6BYH+yFj3uJjNGjD078oK1rG8o38/e4xtAMA6xmngAMBMtjwr/sfbTcdoW9q/36u1Viu5fbbfb0n/fodx1DIy9vtWMfajx/PCWmvntNaekl4o+OfpnRa8TXoX6hm5cvcw9W27gu3bjep3t3JWMt5PV/F67zblsrV2emvtMem9rscmeV+SByQ5beT0+3G8js1r1BXR+1b02sY0CbWMHm+s92lVtuyfzj/ayGsZ/HmMLEOwOr/zw5jI7woAMMMJKwGAmey6JNusILgaa3287/fv9x/n+D9MLyA6oKo2Gmff8Y49Lv11J/+ntfbPSf6h3/yX/fsL0wssH1RV269iqF+nd2GevfpXBR9tJET9yRjbxqrrxiS/TPKAqtp8PPuM2v+m1tq3WmuvTPK29NZI
fOI4dv1Jet+BHznGtgNXt45x1jIy83EqZgbOSfKIMdoP7N//dKDtuv79jqM7V9V9MvYszGFqH3nOA0dv6P/uPDjJrUl+tRpjAgAzhLASAJjJfphe2HO3i5D0r1L952P0/0x64d5RVXWPC/xU1ayqOnDkcf/iNMenN5Ps3VU1a1T/jatqQb/vlemtg/iQqnrTWLPlqmq3qtp1VS+qqh5RVRuMsWmb/v3N/ee8M8lHk2yQ5N9GX/CkquZW1Vb9vrf165ufUReiqard0gtCb0/yuVXVN+C96QV7nx4rAK2qzapqn4HHB1TVWLMC7/a6VuEz/ft/6V9lfWTszdNbc3NcVrOWa/r3O413/NX09sGf3ajX8pmBfhcmuT7JX/Yv/jTSf4P0LpA0lmFq/3x6vwt/3w9BB701vSvMf370BZ0AABJrVgIAM9uH0gsqP1ZVj03vQh4PTu9COqclecpg59baNVX1jCT/leT7VfXN9GYHtvRmq+2X3tqT6w/s9vIkeyZ5SZIDq+rMJLcl2TXJE5I8Nf97MZSXJ9k9yVvSu1ry95JckWT79C4089Akf5Pkd6t4Xa9J8piq+m6/743pnZ78xPRm131ioO+bkzwsycFJflNVp6V3teYdk/xFklcnObbf93Xpzfx8eVU9NL2LymyZ5JnphZgvb62tqra7tNY+XVUL07si+SX99+YPSTbvvz8HpBe2vaS/yweT7FBV/51kUXrv48Ikj0nvyvHHj+Npj0vvCu1PTfKLqvpyehcvekaSHyXZbZzlr04t30zvffxkVZ2U3vu7pLX24XE+18osTjIvvddySv73tWyX5KOtte+MdGyt3V5VH0jypiQ/rar/Su/vgccnuax/G+2c9ILXV/SvWD+yFuaHRq5kP1prbVFVvSLJR5L8pKpOTHJVkkeld4xcmN7V7QEA7kFYCQDMWK21C/rrN74tvbDujiTfTS9QeXpGhZX9fb5ZVQ9K8qr0wsb90wuqLkvv6scnjep/XVU9Iskr0gvJ/m96p9ZemuTT6V01fKTv9VX1qH6fQ5Mckl7weUWSi5L8U5LxXLjlo+mFkg9L73TnOUn+2G9/T2vt9wPPeVtVHZReIPi8JM9P78I3l6UXyn5voO+1VbVfkiP7788rk9yS3gzVd7XWvjaO2u6mtXZ4VX21//yPS2/dxGvTCy3fld4svRFvS/K09E7Rf1x6p83/od/+/tbadVmF1lqrqr9OL3g9LL2AeHF6oehb0js9eTzGXUtr7cyqOiK9CzO9Ir3ZpL9PMhlh5W39539bkmenFx7/Nsk70gvjRzsqvfDxb9P7Pbs8vWD16Az8Lg7Ufl1VHdLf77AkI8sZfD4rWXOytfbRqro4vePkkPSuJH9pej/Tt63gQjoAAKnh1vEGAAAAAJhc1qwEAAAAADpBWAkAAAAAdIKwEgAAAADoBGElAAAAANAJwkoAAAAAoBOElQAAAABAJ8yZ7gLWtKqqJNsnuWG6awEAAACAtdT8JJe11tpkDjrjwsr0gso/TncRAAAAALCWu1eSP03mgDMxrByZUXmvmF0JAAAAAKtrfnqTASc9W5uJYeWIG1pr1093EQAAAACwNumtsjg1XGAHAAAAAOgEYSUAAAAA0AnCSgAAAACgE4SVAAAAAEAnzOQL7AAAAADMKOeee+78JNvFBDbuaXmSa5Jct3DhwuXTVUS11qbruadFVW2SZGmSBa4GDgAAAMwE55577qwkr589e/bzqmq9JFN3OWfWWq21O1prly9fvvzdSU5ZUWg5lfmamZUAAAAA677Xr7feei/ddtttb9too41urqqZNXuNVWqt5Y477pizdOnSXZcsWfKh22+//QFJ/mVN12FmJQAAAMA67Nxzz91k9uzZP95+++3X23rrra+Z7nroviuuuGKLxYsX33TnnXc+YuHChffIz6YyX7M+AQAAAMC6bduqWm+jjTa6eboLYe2w8cYb31RVGyfZdk0/t7ASAAAAYN02K0k59Zvxqrpr
SdM1nh0KKwEAAACAThBWAgAAAACdIKwEAAAAgI7Yd99999h33333mO46pouwEgAAAADohDnTXQAAAAAA02OX152+cLprSJJF73jyudNdA91gZiUAAAAAjGH58uW58cYba9U9mSzCSgAAAADWSq985Su3r6qFv/jFL+Ydcsghu8yfP//B8+fPf/AznvGMXW644Ya7cq/bb789r371q7fbcccd95w7d+4+O+ywwwNf/vKX73DLLbfcLYjcYYcdHvjoRz/6PieddNIme+655/022GCDfd773vduddppp82vqoWf+tSnNjviiCO223rrrR+00UYb7X3QQQfd+5prrpl9yy231Atf+MIdN99887023HDDvZ/xjGfsMnrsD3zgA1s8/OEP/7PNN998r7lz5+6z2267PeBf//Vft1pT79XawmngAAAAAKzVnvnMZ957xx13vO2Nb3zjn376059ueMIJJ2y51VZb3f6xj33sT0ny7Gc/e5eTTz55i4MOOui6ww8//Iof/vCHG33kIx/Z9te//vX6X//61y8ZHOu3v/3t+i984Qvv/dznPveq5z//+Vfd7373Wzay7T3vec9266+//vJ//Md/vPziiy+e99nPfnbrww47rFVVli5dOvs1r3nNZT/4wQ82Oumkk7bYZZddlr373e9ePLLvpz71qa332GOPW570pCctmTNnTvvKV76y6ete97qdli9fniOPPPKqNfdudZuwEgAAAIC12p577nnziSee+PuRx9dee+2c448/fsuPfexjfzrnnHM2OPnkk7d41rOedfXxxx8/0ueqv/u7v7vjE5/4xDannnrq/IMPPviGkX3/8Ic/zPviF7940SGHHHL9SNtpp502P0nuvPPOfP/73//1vHnzWpJcffXVc04//fTN999//6Xf/va3Lx4Ze++9917/uOOO23IwrDznnHMu3HjjjdvI49e//vVX7b///rt/9KMf3UZY+b+cBg4AAADAWu3www+/W9j353/+5zcsWbJkzrXXXjvrlFNOWZAkr3nNa64Y7POGN7zh8iQ59dRTFwy277DDDrcNBpWDnvWsZ10zElQmyb777ntTay2HHXbYNYP99tlnn5suv/zyubfffvtdbYNB5TXXXDN78eLFcx75yEfe8Mc//nHeNddcM3u1X/Q6ysxKAAAAANZq9773vW8bfLzZZpvdmfRmPv7+97+fO2vWrDzgAQ9YNthnp512umP+/Pl3XnrppXMH23fccce79Ru1z92eZ8GCBXcmyc4773yP9uXLl+eaa66Zve22296ZJF/72tc2Ovroo3f46U9/utGtt956twmE11577ewtttjizvG/4nWXsBIAAACAtdqcOWNHXK3dNZkxs2bNamN2GmX99ddfvrrPM2fOnDHHbq1Vkvzyl7+cd/DBB++x66673vqWt7zl0p122un2efPmLT/ttNMWHHPMMdssX77Cp5xxOhVWVtVLk7w0yS79pl8meUtr7av97esneU+SZyeZl+TMJC9rrV1xz9EAAAAAmOl23nnn25YvX57zzz9//X322efWkfZLL710zg033DB7xx13vG1l+0+Gk046acFtt91Wp5566sW77777Xc/3zW9+c5Opfu61TdfWrPxjktclWZjkIUm+leTLVfWA/vb3JTk4yV8neVSS7ZOcPA11AgAAALAWeOpTn7o0Sd797ndvM9j+tre9bZskOfjgg5dOdQ2zZ/eWpByc6XnNNdfMPuGEE7aY6ude23RqZmVr7dRRTW/oz7Z8eFX9McmLkhzaWvtWklTVC5L8qqoe3lr7/houFwAAAICO22+//W55+tOffs1xxx235dKlS2fvv//+N/zoRz/a6OSTT97icY973JLBK4FPlac85SlL3/zmN9/rKU95yn1e8IIXXHXjjTfO/tznPrfl5ptvfsdVV1213lQ//9qkazMr71JVs6vq2Uk2SnJOerMt10vyjZE+rbULk/whyX4rGWdeVW0ycksyf2orBwAA
AKBLjj/++EVHHHHEZT/72c82etOb3rTj//zP/8w//PDDLz/llFN+uyaef6+99lp27LHHXlJVefOb37zjscceu9Xznve8q1760pda2nCUGpx+2gVV9cD0wsn1k9yY3kzKr1TVoUk+01qbN6r/D5Oc1Vp77QrGOzrJUWNsWtBaG/My9AAAwMyzy+tO79YfRxOw6B1PrumuAeiOc889975z5sw5Y/fdd79xww03vHXVezDT3XzzzetfdNFFG99xxx0HLVy48MLR2/sTApdmCvK1Ls6s/HWSByd5WJKPJflsVd1/AuO9PcmCgdu9JlogAAAAADD5OrVmZZK01m5LcnH/4blV9dAk/5jkhCRzq2rT1tqSgV22SXL5SsZblmTZyOMq/8AIAAAAAF3UxZmVo81KMi/JuUluT/LYkQ1VtUeSndI7bRwAAAAAWIt1amZlVb09yVfTu2jO/CSHJjkwyRNaa0ur6pgk762qa5Ncn+RDSc5xJXAAAAAAWPt1KqxMsnWSf0+yXXqLdP48vaDy6/3t/5RkeZKT0ptteWaSl01DnQAAAADAJOtUWNlae9Eqtt+a5PD+DQAAAABYh6wNa1YCAAAAADOAsBIAAAAA6ARhJQAAAADQCcJKAAAAAKAThJUAAAAAQCcIKwEAAACAThBWAgAAAMAMUVULX/nKV24/3XWsyJzpLgAAAACAaXL0goXTXUKS5Oil5053CV3yb//2b5tfeeWVc/75n//5yumuZU0zsxIAAAAAOuSEE07Y/OMf//g2013HdBBWAgAAAMAYbr755rrzzjvH3Hb99dfL1aaANxUAAACAtdrvfve79Z75zGfuvPXWWz9o7ty5++ywww4PfM5znrPTrbfeWklywQUXzH3iE5947wULFjx4gw022Huvvfa67/HHH79gcIzTTjttflUt/MQnPrHZP/zDP2y/9dZbP2jjjTfe57rrrpt9yCGH7LLhhhvu/ctf/nLeox71qPtstNFGez/96U/fNUl22GGHBx5yyCG7jK5p33333WPffffdY/T4n/zkJzd7+ctfvsOWW2651wYbbLD3Yx7zmPtcfPHF6w3ud/bZZy+47LLL5lbVwqpauMMOOzxwZPstt9xS//RP/7T9TjvttOfcuXP32XbbbR/0kpe85F633HJLDT7/LbfcUi960Yt23GyzzfbaaKON9n7MYx5zn0suuWS9dJw1KwEAAABYay1atGi9hz3sYfe74YYbZh966KFX3/e+973lT3/609xTTz11sxtvvHHWVVddVQcccMD9br311lkvfOELr9hiiy3uOO6447Z8znOec5/bbrvtkuc973lLBsd75zvfuf16663XDj/88MuXLVs2a968eS1J7rzzznriE5+4+0Mf+tAbjz766Es33HDD5cPU+653vWu7qsrf//3fL77yyivXO+aYY7Z53OMet8cvfvGLX2688cbtyCOPXPz617/+Xpdffvl6//Iv/3JpksyfP395v4Y8/vGPv8+555678aGHHnr1/e53v1vOP//8DT71qU9tffHFF8/7xje+ccnI8/zN3/zNLl/+8pc3P/jgg6/db7/9bjz77LM3edKTnrT70G/0GiKsBAAAAGCt9cpXvnKHa665Zr2zzjrrVwcccMDNI+3vf//7L1u+fHn+9m//dsdrrrlmzhlnnPHrJzzhCTcmySte8YqrH/CABzzgyCOP3PE5z3nOktmzZ9813rJly+q88867YOONN26Dz3PbbbfVwQcffN1HPvKRP02k3qVLl8658MILf7HZZpstT5KFCxfe/KIXveje73//+7d64xvfeOXTnva06z/4wQ/edv31189+2ctedu3gvh//+Mc3P+ecczb5yle+ctdrSZI999zzlte85jU7f/3rX9/o8Y9//E3nnHPOBl/+8pc3f+5zn3vV5z73uT8kyZFHHnnVU5/61F1/85vfbDCR+qea08ABAAAAWCvdeeed+frXv77pox/96CWDQeWIWbNm5Zvf/OaC
Bz7wgTcNhnsLFixY/rznPe+qyy67bO5PfvKT9Qf3edaznnXN6KByxCte8YqrJlrzX//1X18zElQmyWGHHXbdVlttdfuZZ565YGX7JclJJ5202b3vfe9bH/SgB926ePHiOSO3gw466IYk+cY3vjE/SU455ZQFSfKqV73qisH9jzjiiCvuOWq3mFkJAAAAwFrpsssum3PjjTfOvv/973/LivosXrx47t57733j6Pb73//+tybJJZdcMu+hD33orSPtu+6667Kxxpk9e3a7973vfdtEa959991vHXw8a9as7LTTTsv++Mc/zl3VvosWLVr/t7/97frbb7/9XmNtv/LKK9dLkt///vdzZ82alfvf//53ey0PfOADbx1rvy4RVgIAAABA34YbbjjmrMq5c+e2wdPFV+XOO+/M6vQfj+XLl2f33Xe/5Z3vfOelY23fddddJxymTjdhJQAAAABrpe233/6OjTfe+M4LLrhgheswbrfddrddcskl649u/9WvfrV+kuy2225jzqQcrwULFtyxdOnSe6SSl1122dwdd9zxHuHhRRdddLdali9fnj/84Q/z9thjj7tmh1bV6N2SJDvvvPOyX/3qVxs+9alPvWHWrBWv7rjzzjvftnz58lxwwQXz9tprr7te3/nnn3+P96FrrFkJAAAAwFpp9uzZefzjH7/krLPO2vQ73/nOhqO3L1++PI997GOXnn/++Rt94xvf2Gik/frrr5/17//+71tuv/32t+2zzz4TOjV65513XnbeeedtfOutt96VMB533HELLr/88jFP6/7P//zPLa677rq7Mrljjz12s6uuumq9v/iLv1g60rbhhhsuv+GGG+4RgB5yyCHXXXnlleu9973v3XL0thtvvLGuv/76WUly8MEHL02Sd7/73dsM9nnPe96zzej9usbMSgAAAADWWu95z3v+9N3vfneTJzzhCXsceuihV9/vfve7ZfHixeudcsopm59zzjkXHn300Yu//OUvb/60pz1t9xe96EVXbr755nccd9xxW/7pT3+ad+yxx14y0VO1X/ziF199xhlnbHbggQfu/vSnP/26Sy65ZN7JJ5+8+Y477jjmjM0FCxbc8fCHP/y+z3nOc66+4oor1jvmmGO22WmnnZa94hWvuHqkz957733T6aefvtmLX/ziez30oQ+9ef78+XceeuihS1/2spddc9JJJ232mte8Zudvf/vb8/fbb78b77zzzrrwwgvXP/300zc/5ZRTfnPAAQfc/IhHPOKWpzzlKdd+/vOf3+r666+fvd9++9141llnbbJo0aJ5E3qxa4CwEgAAAGCmOnrpudNdwkTtuuuut//P//zPha997Wu3/6//+q/NP//5z8/eeuutb3v0ox99/cYbb7x8yy23bN/5znd+dcQRR9zr05/+9Na33XbbrD/7sz+7+Qtf+MLFz372s5eu+hlW7pBDDrn+qKOO+uPHPvaxbd70pjftuOeee9508sknX3zEEUfsOFb/V73qVYt//vOfb/iBD3xgu5tvvnnWfvvtd/0nPvGJP8yfP/+uK4S/+tWvvupnP/vZhieeeOKWxxxzzOztt9/+tkMPPfT82bNn58wzz7zkrW9969YnnHDCll/72tc2W3/99ZfvuOOOy1784hdfseeee941S/SEE05YdPjhh9/xpS99afOvf/3rmz784Q+/4Stf+cpF97nPfR400dc8laq1MdcMXWdV1SZJliZZ0Fq7frrrAQAAumGX152+zvxxtOgdTx57sTNgRjr33HPvO2fOnDN23333GzfccMPOXw16XXXaaafNP/jgg//s05/+9G9f8IIXXDfd9azMzTffvP5FF1208R133HHQwoULLxy9fSrzNWtWAgAAAACdIKwEAAAAADpBWAkAAAAAdIIL7AAAAADAFHvKU55yQ2ttrb+g0VQzsxIAAAAA6ARhJQAAAMC6bXmStNamuw7WEgO/K8vX9HMLKwEAAADWbde01u644447LAfIuNx+
++3rtdbuSLJkTT+3sBIAAABg3XZda+3ypUuXzp/uQui+1lqWLFmyYPny5ectXLjwyjX9/BJ1AAAAgHXYwoULl5977rnvXrJkyYfmzZu3xcYbb3xTVU13WXRMay233377ekuWLFmwZMmSG1prH5+OOoSVAAAAAOu+U26//fYHLF68+G+qauPpLoZuaq3dsXz58u+11j6+cOHCr09HDcJKAAAAgHXcwoULlyf5l3PPPfdDSbaNpQG5p+VJlkzHqd+DhJUAAAAAM8TChQuvT3L9dNcBKyJFBwAAAAA6QVgJAAAAAHSCsBIAAAAA6ARhJQAAAADQCcJKAAAAAKAThJUAAAAAQCcIKwEAAACAThBWAgAAAACdIKwEAAAAADpBWAkAAAAAdIKwEgAAAADoBGElAAAAANAJwkoAAAAAoBOElQAAAABAJwgrAQAAAIBOEFYCAAAAAJ0grAQAAAAAOkFYCQAAAAB0grASAAAAAOgEYSUAAAAA0AnCSgAAAACgE4SVAAAAAEAnCCsBAAAAgE4QVgIAAAAAnSCsBAAAAAA6QVgJAAAAAHSCsBIAAAAA6ARhJQAAAADQCcJKAAAAAKAThJUAAAAAQCcIKwEAAACAThBWAgAAAACdIKwEAAAAADpBWAkAAAAAdIKwEgAAAADoBGElAAAAANAJwkoAAAAAoBOElQAAAABAJ8yZ7gIAAIC12y6vO71Ndw0AwLrBzEoAAAAAoBOElQAAAABAJwgrAQAAAIBOEFYCAAAAAJ0grAQAAAAAOkFYCQAAAAB0grASAAAAAOgEYSUAAAAA0AnCSgAAAACgE4SVAAAAAEAnCCsBAAAAgE4QVgIAAAAAnSCsBAAAAAA6QVgJAAAAAHSCsBIAAAAA6IROhZVVdWRV/aiqbqiqK6vqS1W1x6g+Z1dVG3X7t+mqGQAAAACYHJ0KK5M8KslHkjw8yeOTrJfka1W10ah+n0yy3cDtNWuySAAAAABg8s2Z7gIGtdYOGnxcVYcluTLJwiTfGdh0c2vt8jVYGgAAAAAwxbo2s3K0Bf37a0e1P6eqrq6qX1TV26tqwxUNUFXzqmqTkVuS+VNWLQAAAAAwtE7NrBxUVbOSvD/Jf7fWfjGw6QtJfp/ksiQPSvKvSfZI8vQVDHVkkqOmrlIAAAAAYDJ0NqxMb+3KPZM8crCxtfaJgYfnV9XiJN+sqt1aa5eMMc7bk7x34PH8JH+c7GIBAAAAgInpZFhZVR9O8pQkB7TWVhUs/qB/f58k9wgrW2vLkiwbGHuyygQAAAAAJlGnwsrqJYkfSvK0JAe21n43jt0e3L9fPFV1AQAAAABTr1NhZXqnfh+a5C+T3FBV2/bbl7bWbqmq3frbv5LkmvTWrHxfku+01n4+HQUDAAAAAJOja2HlS/v3Z49qf0GSY5PcluRxSV6RZKMklyY5Kcn/WyPVAQAAAABTplNhZWttpQtKttYuTfKoNVQOAAAAALAGzZruAgAAAAAAEmElAAAAANARwkoAAAAAoBOElQAAAABAJwgrAQAAAIBOEFYCAAAAAJ0grAQAAAAAOkFYCQAAAAB0grASAAAAAOgEYSUAAAAA0AnCSgAAAACgE4SVAAAAAEAnCCsBAAAAgE4QVgIAAAAAnSCsBAAAAAA6QVgJAAAAAHSCsBIAAAAA6ARhJQAAAADQCcJKAAAAAKAThJUAAAAAQCcIKwEAAACAThBWAgAAAACdIKwEAAAAADpBWAkAAAAAdIKwEgAAAADoBGElAAAAANAJwkoAAAAAoBOElQAAAABAJwgrAQAAAIBOEFYCAAAAAJ0grAQAAAAAOkFYCQAAAAB0grASAAAAAOgEYSUAAAAA0AnCSgAAAACgE4SVAAAAAEAnCCsBAAAAgE4QVgIAAAAAnSCsBAAAAAA6QVgJAAAAAHSCsBIAAAAA6ARhJQAAAADQCcJKAAAAAKAThJUAAAAAQCcIKwEAAACAThBWAgAAAACdIKwEAAAAADpBWAkAAAAAdIKw
EgAAAADoBGElAAAAANAJwkoAAAAAoBOElQAAAABAJwgrAQAAAIBOEFYCAAAAAJ0grAQAAAAAOkFYCQAAAAB0grASAAAAAOgEYSUAAAAA0AnCSgAAAACgE4SVAAAAAEAnCCsBAAAAgE4QVgIAAAAAnSCsBAAAAAA6QVgJAAAAAHSCsBIAAAAA6ARhJQAAAADQCcJKAAAAAKAThJUAAAAAQCcIKwEAAACAThBWAgAAAACdIKwEAAAAADpBWAkAAAAAdIKwEgAAAADoBGElAAAAANAJwkoAAAAAoBOElQAAAABAJwgrAQAAAIBOEFYCAAAAAJ0grAQAAAAAOkFYCQAAAAB0grASAAAAAOgEYSUAAAAA0AnCSgAAAACgE4SVAAAAAEAnCCsBAAAAgE4QVgIAAAAAnSCsBAAAAAA6oVNhZVUdWVU/qqobqurKqvpSVe0xqs/6VfWRqrqmqm6sqpOqapvpqhkAAAAAmBydCiuTPCrJR5I8PMnjk6yX5GtVtdFAn/clOTjJX/f7b5/k5DVcJwAAAAAwyeZMdwGDWmsHDT6uqsOSXJlkYZLvVNWCJC9Kcmhr7Vv9Pi9I8quqenhr7ftruGQAAAAAYJJ0bWblaAv699f27xemN9vyGyMdWmsXJvlDkv3GGqCq5lXVJiO3JPOnsF4AAAAAYEidDSuralaS9yf579baL/rN2ya5rbW2ZFT3K/rbxnJkkqUDtz9OerEAAAAAwIR1NqxMb+3KPZM8e4LjvD29GZojt3tNcDwAAAAAYAp0as3KEVX14SRPSXJAa21wJuTlSeZW1aajZldu0992D621ZUmWDYw9+QUDAAAAABPWqZmV1fPhJE9L8pjW2u9GdTk3ye1JHjuwzx5JdkpyzhorFAAAAACYdF2bWfmRJIcm+cskN1TVyDqUS1trt7TWllbVMUneW1XXJrk+yYeSnONK4AAAAACwdutaWPnS/v3Zo9pfkOTY/n//U5LlSU5KMi/JmUletgZqAwAAAACmUKfCytbaKheUbK3dmuTw/g0AAAAAWEd0as1KAAAAAGDmElYCAAAAAJ0grAQAAAAAOkFYCQAAAAB0grASAAAAAOgEYSUAAAAA0AnCSgAAAACgE+ZM1kBVtWGSZyeZl+QrrbXfT9bYAAAAAMC6b6iwsqqOSfKw1tqe/cdzk3w/yZ79Lkur6jGttZ9OTpkAAAAAwLpu2NPAH53k5IHHh6YXVD6nf395kqMmVhoAAAAAMJMMG1Zum2TRwOO/SvLj1tpxrbULknwyycMmVhoAAAAAMJMMG1belGTTJKmqOUkOTHLmwPYbkiyYSGEAAAAAwMwy7AV2fpLkb6vqrCRPTTI/yakD23dLcsUEawMAAAAAZpBhw8o3pDeT8sdJKskXW2s/HNj+tCT/PcHaAAAAAIAZZKiwsrX246q6b5JHJFnSWvv2yLaq2jTJR5OcPRkFAgAAAAAzw1BrVlbVAUnSWvvyYFDZb1uS5AuxZiUAAAAAsBqGvcDOWUkev5Ltj+n3AQAAAAAYl2HDylrF9nlJ7hxybAAAAABgBhr3mpVVtVOSXQaa7jtyOvgomyb5uyS/n1BlAAAAAMCMsjoX2HlBkqOStP7tDf3baJXerMq/m3B1AAAAAMCMsTph5YlJfpFeGHlikg8m+e6oPi3JTUnOa61dMSkVAgAAAAAzwrjDytbar5L8Kkmq6gVJvtNa+91UFQYAAAAAzCyrM7PyLq21z052IQAAAADAzDZUWJkkVXW/9NaxvHeSzXLPK4S31tpjJ1AbAAAAADCDDBVWVtX/SfKZJLcn+XWS68bqNoG6AAAAAIAZZtiZlUcn+WmSJ7bWrp68cgAAAACAmWrWkPttn+TTgkoAAAAAYLIMG1b+PL3AEgAAAABgUgwbVr4yyYuq6hGTWQwAAAAAMHMNu2bla5MsTfLdqrogyR+S3DmqT2ut/eVEigMAAAAAZo5hw8oHJWnphZQbJ7n/GH3asEUBAAAAADPPUGFla22X
Sa4DAAAAAJjhhp1ZCQAAQEft8rrT15kz3Ra948k13TUAsOaMK6ysqp2SpLX2h8HHqzLSHwAAAABgVcY7s3JRklZVG7TWbht5PI79Zg9ZFwAAAAAww4w3rHxheuHk7aMeAwAAAABMinGFla21Y1f2GAAAAABgomZNxiBVtUFVbTAZYwEAAAAAM9PQYWVV7VRVn6mqK5LcmOTGqrqiqj5dVTtPXokAAAAAwEww3jUr76aq7pvke0k2TfL1JL/qb7pvkuclObiqHtla+/VkFAkAAAAArPuGCiuTvCPJ8iR7t9bOH9xQVXsm+Wa/z9MmVh4AAAAAMFMMexr4o5J8cHRQmSSttV8k+XCSAydQFwAAAAAwwwwbVq6X5JaVbL+53wcAAAAAYFyGDSt/muTFVbVg9Iaq2iTJi5L8ZCKFAQAAAAAzy7BrVh6V5IwkF1bVZ5L8pt++R5LnJ9kiyeETLw8AAAAAmCmGCitba9+qqicleVeS143afF6S/9NaO2uCtQEAAAAAM8iwMyvTWvtGkr2ratskO/ebf99au3xSKgMAAAAAZpShw8oR/XBSQAkAAAAATMiwF9hJVW1VVe+uqguq6ub+7YJ+2zaTWSQAAAAAsO4bKqysqgckOT/JK5MsTfKf/dvSftvPq2rPySoSAAAAAFj3DXsa+EeSzE7ysNbajwY3VNW+Sb6S5ENJHj2x8gAAAACAmWLY08D3TfKB0UFlkrTWfpjkA0keNpHCAAAAAICZZdiw8sokt65k+639PgAAAAAA4zJsWPn+JC+tqm1Hb6iq7ZO8tN8HAAAAAGBchl2zclaSG5NcXFX/leTifvvuSf6q/3hWVb1yYJ/WWnvfsIUCAAAAAOu2YcPKdw/893PG2P6gUX2SpCURVgIAAAAAYxo2rNx1UqsAAAAAAGa8ocLK1trvJ7sQAAAAAGBmG/YCOwAAAAAAk0pYCQAAAAB0grASAAAAAOgEYSUAAAAA0AnjCiur6h+q6s+muhgAAAAAYOYa78zK9yV5yMiDqrqzqg6dmpIAAAAAgJlovGHldUm2GXhcU1ALAAAAADCDzRlnv7OTHF1VD06ytN/2vKp6+Er2aa21f5xAbQAAAADADDLesPJlSd6f5C+SbJ2k9f/7L1ayT0sirAQAAAAAxmVcp4G31q5srR3aWtuutTY7vdPAn9tam7WS2+ypLR0AAAAAWJeMd83K0V6Q5H8msxAAAAAAYGYb72ngd9Na++zIf1fV/ZPs3H/4+9baBZNRGAAAAAAwswwVViZJVf1lkvcm2WVU+++SvLK1dsrESgMAAAAAZpKhTgOvqiclOan/8PVJnta/vT699SxPrqqDJqVCAAAAAGBGGHZm5ZuS/DzJ/q21mwbaT6mqDyf5XpKjkpwxwfoAAAAAgBli2AvsPCjJZ0cFlUmSftux/T4AAAAAAOMybFh5a5LNV7J9834fAAAAAIBxGTas/FaSf6yq/UZvqKqHJfmHJN+YSGEAAAAAwMwy7JqVr0lyTpLvVdUPk/y6375Hkn2TXJnktRMvDwAAAACYKYaaWdla+116a1J+MMlmSZ7Vv22W5ANJ9mqtLZqkGgEAAACAGWDYmZVprV2Z5J/6NwAAAACACRl2zUoAAAAAgEklrAQAAAAAOkFYCQAAAAB0grASAAAAAOgEYSUAAAAA0AmrHVZW1YZVdW5VvWQqCgIAAAAAZqbVDitbazcn2TVJm/xyAAAAAICZatjTwM9I8oTJLCRJquqAqjq1qi6rqlZVfzVq+7H99sHbGZNdBwAAAACw5g0bVr41yZ9V1eeq6pFVtUNVbT76NsS4GyX5WZLDV9LnjCTbDdz+ZojnAQAAAAA6Zs6Q+/2yf3//JIeupN/s1Rm0tfbVJF9NkqpaUbdlrbXLV2dcAAAAAKD7hg0r35LpW7PywKq6Msl1Sb6V5I2ttWtW1Lmq5iWZN9A0f4rrAwAAAACGMFRY2Vo7epLrGK8zkpyc5HdJdkvytiRfrar9Wmt3rmCf
I5MctYbqAwAAAACGNOzMyrupqgVJblxJYDgpWmvHDzw8v6p+nuSSJAcm+eYKdnt7kvcOPJ6f5I9TUiAAAAAAMLRhL7CTqnpIVZ1RVTcnuSbJo/rtW1bVl6vqwMkpccVaa79NcnWS+6ykz7LW2vUjtyQ3THVdAAAAAMDqGyqsrKpHJPlekt2TfH5wnNba1UkWJPm7yShwFXXcK8kWSRZP9XMBAAAAAFNr2NPA35bkV0kent5p1S8etf2sJM9f3UGrauPcfZbkrlX14CTX9m9HJTkpyeXprVn5ziQXJzlzdZ8LAAAAAOiWYU8Df2iSz7TWlmXsq4L/Kcm2Q4z7kCQ/7d+S3lqTP03v6uN3JnlQklOS/CbJMUnOTbJ/vw4AAAAAYC027MzK27PyoHOHJDeu7qCttbOT1Eq6PGF1xwQAAAAA1g7Dzqz8fpJnjLWhqjZK8oIk3x62KAAAAABg5hk2rDwqyUOq6vQkT+y37VVVL07v1Oytkrx1EuoDAAAAAGaIocLK1toPkjwpvYvh/Hu/+T1JPpFkdpIntdZ+PikVAgAAAAAzwrBrVqa19q0ke1TV3umFlrOSXJLk3NbaWBfdAQAAAABYoaHDyhGttcGrdwMAAAAADGXosLKq5iX52/ROB9+l37woyVeSfKq1dutEiwMAAAAAZo6h1qysqnslOS/JB5PsleSq/m2vftt5/T4AAAAAAOMy7NXAP5Jk5yTPbK3t0Fp7VP+2Q5JnJdmp3wcAAAAAYFyGPQ38sUne11r74ugNrbX/rKp9kvz9hCoDAAAAAGaUYWdW3pDkypVsv7zfBwAAAABgXIYNKz+T5LCq2nD0hqraOMkLkhwzkcIAAAAAgJllXKeBV9XTRzX9NMmTk1xYVZ9NcnG/ffckz0tybZKfT1aRAAAAAMC6b7xrVn4xSUtS/ceD//2GMfrfK8lxSU6cUHUAAAAAwIwx3rDy0VNaBQAAAAAw440rrGytfXuqCwEAAAAAZrZhL7ADAAAAADCpxnsa+D1U1SOTvDDJvZNslv9dw3JEa63tNYHaAAAAAIAZZKiwsqpemeRdSW5N8uv0rv4NAAAAADC0YWdWvjrJfyc5uLW2dBLrAQAAAABmqGHXrNwwyX8IKgEAAACAyTJsWHlWkgdOZiEAAAAAwMw2bFj590keW1WvqqrNJ7MgAAAAAGBmGiqsbK1dmuTjSd6R5Kqquqmqrh91c4o4AAAAADBuw14N/C1J3pDkT0l+nEQwCQAAAABMyLBXA39JktOT/FVrbfkk1gMAAAAAzFDDrlk5N8npgkoAAAAAYLIMG1aelmT/ySwEAAAAAJjZhg0r35zk/lX10apaWFVbVdXmo2+TWSgAAAAAsG4bds3KX/fvH5zk71bSb/aQ4wMAAAAAM8ywYeVbkrTJLAQAAAAAmNmGCitba0dPch0AAAAAwAw37JqVAAAAAACTaqiZlVX1z+Po1lprbx1mfAAAAABg5hl2zcqjV7KtJan+vbASAAAAABiXoU4Db63NGn1LL/jcLcn7kvw4ydaTWCcAAAAAsI6btDUrW2vLW2u/a629KslFST40WWMDAAAAAOu+qbrAzneSPGmKxgYAAAAA1kFTFVY+JMnyKRobAAAAAFgHDXs18OetYNOmSQ5I8vQknxqyJgAAAABgBhr2auDHrmTb1UnekeQtQ44NAAAAAMxAw4aVu47R1pJc11q7YQL1AAAAAAAz1FBhZWvt95NdCAAAAAAwsw07s/IuVbVxks2S1OhtrbU/THR8AAAAAGBmGPYCO+snOSrJi5JssZKus4cZHwAAAACYeYadWfnRJM9P8qUk301y3WQVBAAAAADMTMOGlU9P8qnW2t9NZjEAAAAAwMw1a8j9WpKfTGYhAAAAAMDMNmxY+eUkj5vMQgAAAACAmW3YsPKtSe5dVZ+oqoVVtVVVbT76NpmFAgAAAADrtmHXrLyof793elcEXxFXAwcAAAAAxmXYsPIt6a1bCQAAAAAwKYYKK1tr
R09yHQAAAADADDfsmpUAAAAAAJNKWAkAAAAAdIKwEgAAAADoBGElAAAAANAJwkoAAAAAoBOElQAAAABAJwgrAQAAAIBOEFYCAAAAAJ0grAQAAAAAOkFYCQAAAAB0grASAAAAAOgEYSUAAAAA0AnCSgAAAACgE4SVAAAAAEAnCCsBAAAAgE4QVgIAAAAAnSCsBAAAAAA6QVgJAAAAAHSCsBIAAAAA6ARhJQAAAADQCXOmuwAAAABYkV1ed3qb7homw6J3PLmmuwaAtYGZlQAAAABAJwgrAQAAAIBOEFYCAAAAAJ0grAQAAAAAOkFYCQAAAAB0grASAAAAAOgEYSUAAAAA0AnCSgAAAACgE4SVAAAAAEAnCCsBAAAAgE4QVgIAAAAAnSCsBAAAAAA6QVgJAAAAAHSCsBIAAAAA6IROhZVVdUBVnVpVl1VVq6q/GrW9quotVbW4qm6pqm9U1e7TVC4AAAAAMIk6FVYm2SjJz5IcvoLtr0nyD0lekuRhSW5KcmZVrb9mygMAAAAApsqc6S5gUGvtq0m+miRVdbdt1Wt4RZL/11r7cr/teUmuSPJXSY5fg6UCAAAAAJOsazMrV2bXJNsm+cZIQ2ttaZIfJNlvRTtV1byq2mTklmT+lFcKAAAAAKy2tSms3LZ/f8Wo9isGto3lyCRLB25/nPzSAAAAAICJWpvCymG9PcmCgdu9prccAAAAAGAsnVqzchUu799vk2TxQPs2Sc5b0U6ttWVJlo08Hr0WJgAAAADQDWvTzMrfpRdYPnakob8G5cOSnDNdRQEAAAAAk6NTMyurauMk9xlo2rWqHpzk2tbaH6rq/UneWFUXpRdevjXJZUm+tIZLBQAAAAAmWafCyiQPSXLWwOP39u8/m+SwJO9MslGSTyTZNMn3khzUWrt1zZUIAAAAAEyFToWVrbWzk6xwUcnWWkvyz/0bAAAAALAOWZvWrAQAAAAA1mHCSgAAAACgE4SVAAAAAEAnCCsBAAAAgE4QVgIAAAAAnSCsBAAAAAA6QVgJAAAAAHSCsBIAAAAA6ARhJQAAAADQCcJKAAAAAKAThJUAAAAAQCcIKwEAAACAThBWAgAAAACdIKwEAAAAADpBWAkAAAAAdIKwEgAAAADoBGElAAAAANAJwkoAAAAAoBOElQAAAABAJwgrAQAAAIBOEFYCAAAAAJ0grAQAAAAAOkFYCQAAAAB0grASAAAAAOgEYSUAAAAA0AnCSgAAAACgE4SVAAAAAEAnCCsBAAAAgE4QVgIAAAAAnSCsBAAAAAA6QVgJAAAAAHSCsBIAAAAA6ARhJQAAAADQCcJKAAAAAKAThJUAAAAAQCcIKwEAAACAThBWAgAAAACdIKwEAAAAADpBWAkAAAAAdIKwEgAAAADoBGElAAAAANAJwkoAAAAAoBOElQAAAABAJwgrAQAAAIBOEFYCAAAAAJ0grAQAAAAAOkFYCQAAAAB0grASAAAAAOgEYSUAAAAA0AnCSgAAAACgE4SVAAAAAEAnCCsBAAAAgE4QVgIAAAAAnSCsBAAAAAA6QVgJAAAAAHSCsBIAAAAA6ARhJQAAAADQCcJKAAAAAKAThJUAAAAAQCcIKwEAAACAThBWAgAAAACdIKwEAAAAADpBWAkAAAAAdIKwEgAAAADoBGElAAAAANAJwkoAAAAAoBOElQAAAABAJwgrAQAAAIBOEFYCAAAAAJ0grAQAAAAAOkFYCQAAAAB0grASAAAAAOgEYSUAAAAA0AnCSgAAAACgE4SVAAAAAEAnCCsBAAAAgE4QVgIAAAAAnSCsBAAAAAA6QVgJAAAAAHSCsBIAAAAA6ARhJQAAAADQCcJKAAAAAKAThJUAAAAAQCcIKwEAAACAThBWAgAAAACdIKwEAAAAADpBWAkAAAAAdMJaFVZW1dFV1UbdLpzuugAAAACAiZsz3QUM4ZdJHjfw+I7pKgQAAAAAmDxrY1h5R2vt8ukuAgAAAACYXGvVaeB9u1fVZVX126r6
j6raaWWdq2peVW0ycksyfw3VCQAAAACshrVtZuUPkhyW5NdJtktyVJLvVtWerbUbVrDPkf1+M8fRC9p0l0CHHb20prsEAACYaXZ53enrzN9pi97xZH9TAFNmrZpZ2Vr7amvtP1trP2+tnZnkSUk2TfLMlez29iQLBm73mvJCAQAAAIDVtrbNrLyb1tqSqvpNkvuspM+yJMtGHlf5ByAAAAAA6KK1amblaFW1cZLdkiye7loAAAAAgIlZq8LKqnp3VT2qqnapqkck+a8kdyY5bppLAwAAAAAmaG07Dfxe6QWTWyS5Ksn3kjy8tXbVtFYFAAAAAEzYWhVWttaePd01AAAAAABTY606DRwAAAAAWHcJKwEAAACAThBWAgAAAACdIKwEAAAAADpBWAkAAAAAdIKwEgAAAADoBGElAAAAANAJwkoAAAAAoBOElQAAAABAJwgrAQAAAIBOEFYCAAAAAJ0grAQAAAAAOkFYCQAAAAB0grASAAAAAOgEYSUAAAAA0AnCSgAAAACgE4SVAAAAAEAnCCsBAAAAgE4QVgIAAAAAnSCsBAAAAAA6QVgJAAAAAHSCsBIAAAAA6ARhJQAAAADQCcJKAAAAAKAThJUAAAAAQCcIKwEAAACAThBWAgAAAACdIKwEAAAAADpBWAkAAAAAdIKwEgAAAADoBGElAAAAANAJwkoAAAAAoBPmTHcBAHTI0QvadJdARx29tKa7BFjX7PK60/0/FwBgFDMrAQAAAIBOEFYCAAAAAJ0grAQAAAAAOkFYCQAAAAB0grASAAAAAOgEYSUAAAAA0AnCSgAAAACgE4SVAAAAAEAnCCsBAAAAgE4QVgIAAAAAnSCsBAAAAAA6QVgJAAAAAHSCsBIAAAAA6ARhJQAAAADQCcJKAAAAAKAThJUAAAAAQCcIKwEAAACAThBWAgAAAACdIKwEAAAAADphznQXAAAAAABJssvrTm/TXcNkWfSOJ9d017A2MrMSAAAAAOgEYSUAAAAA0AnCSgAAAACgE4SVAAAAAEAnCCsBAAAAgE4QVgIAAAAAnSCsBAAAAAA6QVgJAAAAAHSCsBIAAAAA6ARhJQAAAADQCcJKAAAAAKAThJUAAAAAQCcIKwEAAACAThBWAgAAAACdIKwEAAAAADpBWAkAAAAAdIKwEgAAAADoBGElAAAAANAJc6a7AGANO3pBm+4SAFjH+GwZyqL1p7uCNWOXW78w3SUArNAurzt9nfgMW/SOJ9d01wCTxcxKAAAAAKAThJUAAAAAQCcIKwEAAACAThBWAgAAAACdIKwEAAAAADpBWAkAAAAAdIKwEgAAAADoBGElAAAAANAJwkoAAAAAoBOElQAAAABAJwgrAQAAAIBOEFYCAAAAAJ0grAQAAAAAOkFYCQAAAAB0grASAAAAAOiEtTKsrKrDq2pRVd1aVT+oqn2nuyYAAAAAYGLWurCyqp6V5L1J3pxknyQ/S3JmVW09rYUBAAAAABOy1oWVSV6Z5JOttc+01i5I8pIkNyd54fSWBQAAAABMxJzpLmB1VNXcJAuTvH2krbW2vKq+kWS/FewzL8m8gab5I/dVNVWlTqulr5u/6k4AsBoWVG0y3TXQXb57sDLLl9083SUAk6zWoe8FO77ixOkuYVL4mXTTuvRzGcOUfQGs1tpUjT3pqmr7JH9K8ojW2jkD7e9M8qjW2sPG2OfoJEetsSIBAAAAYGa4V2vtT5M54Fo1s3JIb09vjctBmye5dhpqWVfNT/LHJPdKcsM01wL8L8cmdJNjE7rJsQnd47iEbho5Nu+b5LLJHnxtCyuvTnJnkm1GtW+T5PKxdmitLUuybFTz9ZNf2sw1cDr9Da017y10hGMTusmxCd3k2ITucVxCNw0cm4vbFJyyvVZdYKe1dluSc5M8dqStqmb1H5+zov0AAAAAgO5b22ZWJr1Tuj9bVT9O8sMkr0iyUZLPTGdRAAAAAMDErHVhZWvthKraKslbkmyb5LwkB7XWrpjWwma2
ZUnenHuebg9ML8cmdJNjE7rJsQnd47iEbprSY3Otuho4AAAAALDuWqvWrAQAAAAA1l3CSgAAAACgE4SVAAAAAEAnCCsBAAAAgE4QVrLaqmq7qnpHVZ1VVTdUVauqA1dj/6P7+4y+3Tp1VcO6b6LHZn+MHarqxKpaUlXXV9WXq+reU1MxzBxVtWlVfaKqrqqqm/rH6T7j3PfYFXxuXjjVdcO6oKrmVdW/VtVlVXVLVf2gqh4/zn19LsIUGPa49LckTK2q2riq3lxVZ1TVtf3j67DV2H/o77yD5qzuDpBkjySvTXJRkvOT7DfkOC9NcuPA4zsnWBfMdBM6Nqtq4yRnJVmQ5G1Jbk/yT0m+XVUPbq1dM7nlwsxQVbOSnJ5kryTvSnJ1kpclObuqFrbWLhrHMMuSvHhU29JJLRTWXccmeUaS96f3GXlYkq9U1aNba99b0U4+F2FKHZshjssB/paEqbFlkn9O8ockP0ty4Hh3nKTvvEmElQzn3CRbtNaurapnJPnPIcf5Ymvt6kmsC2a6iR6bL0uye5J9W2s/SpKq+mqSXyQ5IsnrJ7NYmEGekeQRSf66tfbFJKmqE5P8Jsmbkxw6jjHuaK19fupKhHVTVe2b5NlJXt1ae3e/7d/T+2x7Z3rH5or4XIQpMMHjcoS/JWFqLE6yXWvt8qp6SJIfrca+k/GdN4nTwBlCa+2G1tq1kzBUVdUmVVWTMBbMeJNwbD4jyY9G/iDrj3lhkm8meeZE64MZ7BlJrkhy8khDa+2qJCcm+cuqmjeeQapqdlVtMjUlwjrrGenNuPrESENr7dYkxyTZr6p2XMW+Phdh8k3kuBzhb0mYAq21Za21y4fcfVK+8ybCSqbXb9M7he2Gqvp8VW0z3QXBTNWfsv+gJD8eY/MPk+xWVfPXbFWwztg7yU9aa8tHtf8wyYZJ/mwcY2yY5PokS/vrB32kf4oqsHJ7J/lNa+36Ue0/7N8/eKydfC7ClBrquBzF35LQPZPxnTeJ08CZHtcl+XCSc9Jbg2v/JIcn2beqHjLGhxYw9TZPMi+9af+jjbRtn+TXa6wiWHdsl+Q7Y7QPHlvnr2T/xemdFveT9P6h+aD0Tk/dq6oObK3dMYm1wrpmu6z6s20sPhdh6gx7XCb+loQum+h33rsIK2e4/r8azx1n92WttTbR52ytfWBU00lV9cMk/5HeH1/vmOhzwNpuGo7NDUbGGmPbraP6wIw15LG5QSZwbLXWjhzVdHxV/SbJv6R3us3x46wHZqJhjz+fizB1hv5c9LckdNqEvvMOcho4ByS5ZZy3PaaqiNbaF5JcnuRxU/UcsJZZ08fmLf37sdYRWX9UH5jJhjk2b8nkH1vvS7I8PjdhVYY9/nwuwtSZ1M9Ff0tCZ0zasW1mJRcmecE4+441VX8yXZreKTfAmj82r03vX8G2G2PbSNtlk/A8sLYb5thcnEk+tlprt1TVNfG5CauyOMkOY7Sv6vjzuQhTZ9jjcmX8LQnTb9K+8worZ7j+VZ6One46+ldx2yXJT6e5FOiENX1sttaWV9X5SR4yxuaHJflta+2GNVUPdNWQx+Z5SfavqlmjFhx/WJKbk/xmdevoX9hjyyRXre6+MMOcl+TRVbXJqLXsHjaw/R58LsKUOi9DHJcr4m9J6IzzMknfeZ0GzpSqqp2q6r6j2rYao+tLk2yV5Iw1UhjMcGMdm0m+mOShVfWQgX57JHlMkv9ck/XBOuaLSbZJ8vSRhqraMslfJzm1tbZsoH23qtpt4PH6K7ji8JuSVHxuwqp8McnsJP93pKGq5qU3Q/oHrbVL+20+F2HNGfq49LckdENVbVdV962q9Qaax/2dd5XjT8L1UpiBquqN/f98QJJnJ/l0kt8lSWvt/w30OzvJo1prNdB2c5IT0rsK1K1JHtkf42dJ/ry1dvMaeAmwTprgsTk/vX+Rnp/k3UluT/LK9L5MPri1ZgYXDKGqZif5XpI9k7wrydXpXQRgpyQPba39eqDvoiRp
re3Sf7xLesflcemdgp4kT0jypPT+KHvyqH+5BkapqhOTPC29tV4vTvL8JPsmeWxr7Tv9PmfH5yKsMRM4Lv0tCVOsql6eZNP0rt790iQn539nLn+otba0qo5N77jdtbW2qL/fuL/zrrIGYSXDqKoV/uKM+jA5O/f8gPlkkkck2TG9hVZ/n+SkJP/idBqYmIkcm/32e6X3pfEv0pt9f3aSf2qtXTwV9cJMUVWbpfel7a/SuxLij5K8qrX241H9FiV3Cys3TfKhJA9P7wvj7PT+qPuPJO9urd2+JuqHtVlVrZ/krUmem2SzJD9P8qbW2pkDfc6Oz0VYY4Y9Lv0tCVOv/3105xVs3rW1tmissLK/77i+866yBmElAAAAANAF1qwEAAAAADpBWAkAAAAAdIKwEgAAAADoBGElAAAAANAJwkoAAAAAoBOElQAAAABAJwgrAQAAAIBOEFYCAAAAAJ0grAQAAAAAOkFYCQCwlqmqw6qqVdUuQ+6/e1V9raqW9sf5q4mOCQAAk2HOdBcAAMAa99kkuyZ5Q5IlSX6c5HHTWRAAACTCSgCAtdHnkhyfZNnq7lhVGyTZL8m/tNY+PNA+edUBAMCQnAYOALCWaa3d2Vq7tbXWhth9q/79kkksiQFVtdF01wAAsLYSVgIArGXGWl+yqhZV1WlV9ciq+mFV3VpVv62q5w30OTrJ7/sP39UfY9FKnqf19xndvqiqjh3VtmlVvb+qLq2qZVV1cVW9tqpmjeo3q6r+sarO79d4VVWdUVUPGdXvuVV1blXdUlXXVtXxVbXjON6b+f06FvXruLKqvl5V+4zq97Cq+kpVXVdVN1XVz6vqH0f1eUxVfbe/fUlVfbmq7jeqz9H99+n+VfWFqrouyfcm+joAAGYqp4EDAKw77pPki0mOSW9dyhcmObaqzm2t/TLJyenNqHxfkuOSfCXJjRN90qraMMm3k+yQ5ONJ/pDkEUnenmS7JK8Y6H5MksOSfDXJp9L7Prp/koent3ZmquoNSd6a5MR+n62S/H2S71TV3q21JSsp59+SPCPJh5NckGSLJI9Mcr8kP+mP//gkpyVZnOQDSS7vb39K/3Gq6nH9Gn+b5OgkG/Rr+O+q2qe1tmjU8/5nkouSvD5JTcLrAACYkYSVAADrjj2SHNBa+26SVNWJSS5N8oIkr2qt/byqrk8vrPxJa+3zk/S8r0yyW5K9W2sX9ds+XlWXJXl1Vb2ntXZpVT06vaDyg621wVmM76n+oplVtXOSNyd5Y2vtbSMdqurkJD9N8rIkb8uKPTnJJ1trRwy0vXNgnNnpBaqLkzx4MDAcqaHvXUmuTbJfa+3a/vYv9Wt4c5Lnj3ren7XWDh0Ya6KvAwBgRnIaOADAuuOCkaAySVprVyX5dZJ7T/Hz/nWS7ya5rqq2HLkl+UaS2UkO6Pc7JElLL8S7m4H1N5+e3nfUE0eNdXl6MxcfvYpaliR5WFVtv4Lte6d3JfT3j57ZOFJDVW2X5MFJjh0JKvvbf57k60meNMa4/zbq8URfBwDAjGRmJQDAuuMPY7Rdl2SzKX7e3ZM8KMlVK9i+df9+tySXDQaAKxir0gv0xnL7Kmp5TXqnwF9aVeemd6r7v7fWfjtQQ5L8YiVj7Ny///UY236V5AlVtVFr7aaB9t+N6jfR1wEAMCMJKwEA1h13rqC9VtA+rNmjHs9Kb8bhO8fomyS/WY2xZ6U3+/KJGfv1rHSNzdbaiVX13SRPS/IXSV6d5LVV9fTW2ldXo47VdcuoxxN6HQAAM5WwEgCAFbkuyaaDDVU1N72L5gy6JMnGrbVvrGK8S9Kblbj5SmZXXpJeuPq71trqhJx3aa0tTvLRJB+tqq3Tu7DOG9K7YM4l/W57pnea+lhGrpi+xxjb7pvk6lGzKscy4dcBADATWbMSAIAVuST/u97kiP+be86sPDHJflX1hNEDVNWmVTXyD+QnpRfgHTVGv5HZnyenNxPxqFEXvEn1bLGiYqtqdlUtGGxrrV2Z
5LIk8/pNP0nvlO1XVNWmY9XQDzvPS/L8wT5VtWd6szW/sqIaBgz9OgAAZjIzKwEAWJFPJfm3qjopvdO890ryhCRXj+r3riRPTXJaVR2b5NwkGyV5YJJnJNklvdmIZ1XV55L8Q1XtnuSM9P7xfP8kZyX5cGvtkqp6Y5K3J9mlfwXuG9K7KM7TknwiybtXUO/8JH+sqi8m+Vl6p1o/LslDkxyRJK215VX10iSnJjmvqj6T3pXB75vkAf3Xl/ROH/9qknOq6pgkGyT5+yRLkxy9qjdugq8DAGDGElYCALAin0wvXHtRkoPSu+L345N8c7BTa+3mqnpUktend2Xw5yW5Pr21Ko9KL+Ab8YIkP++P+a7+th8n+Z+B8d5RVb9J8k/531mYlyb5WpJTVlLvzemd/v0X+d+rcV+c5GWttY8NjH9mVT26P/YR/X6X9F/vSJ9vVNVB6V25/C3pXRDn20le21obfTGdMU3gdQAAzFjVWpvuGgAAAAAArFkJAAAAAHSDsBIAAAAA6ARhJQAAAADQCcJKAAAAAKAThJUAAAAAQCcIKwEAAACAThBWAgAAAACdIKwEAAAAADpBWAkAAAAAdIKwEgAAAADoBGElAAAAANAJwkoAAAAAoBP+P5Q3TDvq2i6MAAAAAElFTkSuQmCC", "text/plain": [ "
" ] @@ -557,8 +566,8 @@ "name": "stdout", "output_type": "stream", "text": [ - "Average influence of corrupted points: -1.0782924\n", - "Average influence of other points: 0.10896263\n" + "Average influence of corrupted points: -1.079615\n", + "Average influence of other points: 0.109443866\n" ] } ], @@ -612,6 +621,7 @@ "execution_count": 19, "id": "1e096222", "metadata": { + "editable": true, "slideshow": { "slide_type": "" }, @@ -623,7 +633,7 @@ "outputs": [ { "data": { - "image/png": "iVBORw0KGgoAAAANSUhEUgAABTcAAAObCAYAAACGurUMAAAAOXRFWHRTb2Z0d2FyZQBNYXRwbG90bGliIHZlcnNpb24zLjcuMSwgaHR0cHM6Ly9tYXRwbG90bGliLm9yZy/bCgiHAAAACXBIWXMAAA9hAAAPYQGoP6dpAADcTklEQVR4nOzdd5htZ1k34N+TSk+ooQQIivTeUcHQI70pTamCFCkCChEpoUY6GAVBqnQMTekgvYQOIuWjBQgQOgk1AfJ8f7xryGaY0+bMnJl1zn1f175m9mrz7rX3rL3Wb72lujsAAAAAAHOz10YXAAAAAABgNYSbAAAAAMAsCTcBAAAAgFkSbgIAAAAAsyTcBAAAAABmSbgJAAAAAMyScBMAAAAAmCXhJgAAAAAwS8JNAAAAAGCWhJsAAKyrqrpPVX2mqn5eVV1V99voMs3BtK/euUbbukJVvbWqvjdt9xPT9OdPzw9Zi78DALCrCTcBAJJU1UOmkKer6sIbXZ7dRVXdOsnTkvwiyVOTHJHkg7vobx86vZ+P2BV/b7OqqjMleX2SKyV5WcZ78MwNLRQAwBrZZ6MLAACw0aqqkvxVkk5SSe6a5IEbWqjdxw2Xfnb3Nze0JPNz0SQ/W4PtXCnJOZI8pLsfuwbbAwDYNNTcBABIrpvkkCQvSHJ8kjtU1X4bWqLdx7mTRLC547r7c939tTXY1Lmnn94DAGC3I9wEABg1NZPk2UlenORsSW62uEBVvWlq4nzplTZQVbea5j9x2fSzVNXjquqzU5+TJ1TV26vquits447TNu5YVYdV1Tun5XthmZtW1Yuq6v9V1U+nx0enfi1XPLerqgtV1dFV9cNp+fdX1Q0W/94K6xxcVUdV1Zer6qSq+n5Vva6qrriNfbm0/iOmcl9jer7U5L+XLXeRqd/Hr1fVyVX17ap6yUpdA0yv48iq+khVfXcq11er6llVdfCyZZ+f5B3T04cv/v2qOnSxjEvPl61/yDTv+cu3O03/vaq6d1V9anpf37mwzHa/59vYh7/T5+ZimavqllX1oar6WVX9oKpeVlXnWf4aMkL7JHnewj6441b+7lab81fVsVV17Bbm3aaq3lFVP6qqX0z74B+rav8tvb6qOtv0Hn5rek//r6rutJXyXbeq/quqvjMt//Wqem1VXXuFZa9XVW+o0dfoSVX1pap6QlUduKXtAwDzolk6ALBHq6qDktw4yf/r7vdX1YlJHpDkbklevrDoC5JcL8ntp/nL3WH6+fyFbZ8/yTszaoW+J8mbkpw+o6n2m6rqr7v72Sts65ZJDkvyxoy+Ec+/MO/IJKckOSbJN5IckOSaGf1aXjHJXy57fRdJ8v4kZ87od/FTSX4vyauTvGEL++RySd6S5CxJ3pzkVRmB702TvLeqbtbdK6674J3TzztO5T9ihb9z2LTtfZP8V5IvJjk4yc2T3KCqrtHdH1tY5eZJ7p4RWr4/yclJLp7RpcCNquoK3f2NadnXTD/vkORdC+VJkmO
3Ufbt8bQkV8vYp29I8uvpNa32Pd9R98z43L4u4/VdOcmtkly6qi7T3Scl+VHGfr9MkpskeW2ST0zrfyJrrKqem+ROSY5LcvT096+S5FFJrlVV1+nuXy1b7cAk78t4L/8zyf5J/izJc6vqlO5+weLCVXVEkocl+UnGe/z1jJqpf5jkL5K8bWHZhyd5RJIfJPnvJN9JcqmMLieuX1VX7e4T1+TFAwAbp7s9PDw8PDw8PPbYR5IHZ/S1efjCtI9kBIgXXJh2moyw5vgk+yzbxjmT/CrJR5dNf+e0nVsvm35gRrj08yQHLUy/41SWU5IctoXy/v4K0/bKCF87yZWXzXv7NP0ey6b/6TS9k9xxYfo+GSHjL5L8ybJ1zp0RqH4ryf7buX/fOU45f2f6mZP8MMn3klxs2bxLZIRXH1s2/Twr/d2MbgV+neQZy6YfOr2+R2yhbI+Y5h+6wrxDpnnPXzb9+dP0byS5wBZe73a/59vYd53knVso84lJLrls3kumeX++bPrS5+qOK/yNpddzyA7st2OTHLuFv/GqJKfdQpnvu8Lr6yT/nmTvhekXy/h/+swK73Mn+XKS86xQroMXfr/GtOz7kxy4hbI+ZXveBw8PDw8PD4/N/dAsHQDYY1X9ZiChU5K8cGHW83PqwEJJku7+RZJXJDkoowbnor9IsndObf6bGs3X/yTJ0d39ssWFu/tHSR6eEZjeYoWivba737RSmbv7SytMOyWjJmEWy1ZV582o1fnFJP+2bJ03ZqGW24IbJPn9JP/c3e9ats43kzw+I8y91krl2wG3zwj8Ht7dn1n2dz6d0UXAZavqYgvTv9GjRmKWLf+WJP+X331f1tPju/srixN28j3fUU/v7v9dNm2pRuiV1mD7O+q+GYHknbv758vmPSrJ95PcboX1fpbk/t3966UJ0+fhfUkuWlVnWFj23tPPB/SpNXSzsN5xC0/vM/2867TvF5d7fkbQvFJ5AICZ0SwdANiTXTMjyHvzsrDkJUmelOSOVfWP3f3LafrzMwLPO2Q0R15yhyS/nNZbctXp5wFb6Lvw7NPPi64w70NbKnBVnTXJ3yW5fkbz8tMvW+Q8C79fZvr5gSkAXe69SZb3U7hU7vNvodx/MP28aLbQrH07Lf2dS2/h71xo4e98JvlNGH27jJp3l86o/bn3wjon70R5dtRK79HOvOc76iMrTPv69PPMa7D97VZVp8t4P76X5H7jbfodJ2Xl1/2FXrlp+OJr+cn0+1UyalyuGPwvc9WM/8k/q6o/W2H+fknOXlVn7e7vb8f2AIBNSrgJAOzJ7jb9fP7ixO7+QVX9V0YNu5tk9AWYHn1y/r8kN66qM3f3D6f+KS+R5DXd/b2FzZx1+nmd6bElZ1hh2vErLTgNgvLhJBfICNdemNGf4K8yakHeN6PPwiUHTD+/vYW/vdL0pXKvFAgtWqncO2Lp79x1q0v99t95cpL7ZTSLf3NG0/ClWoJ3zG/3TbreVnqPduY931E/WmHaUn+We68wbz2dOaOm89kzaqfuiB9tYfpKr+XAJD9coWboSs6aca2zrfKcIaNWKQAwU8JNAGCPVFVnzxggJ0leWlUv3cKid8sUbk5emOTRGYO3PDOnDiT0gmXrnTD9vG93P30Hi9dbmP5XGcHmEd39iMUZVXXVjHBz0VKNuIO2sL2Vpi+V+ybd/bptF3XVlv7Opbv7U9tauKrOkdHU+NNJ/rC7f7xs/m1WUYal2qwrnRMfuI11V3qPduY930y2tl+SsW9+tPB86XV/vLsvt05lyvQ3z1pVp92OgPOEJHt191nWsTwAwCagz00AYE91h4ymqR9N8pwtPL6b5NpVdYGF9V6YEf7coar2TXKbjOa4i83Uk+SD08+rrWGZLzj9PHqFeX+ywrRPTD+vWlUrnff98QrT1qPcK9nRv/N7Geeub1kh2Dx4mr/cUj+OW6rJ+MPp53lXmHeF7SzXol2179bbFvdLVV0wp9YITpJ0908y+jy9eFW
tZ5j4wYwaoodt57JnrqqLr2N5AIBNQLgJAOyplppD37O7/2qlR8YgPEuDDiVJuvvrSf4no/+/+2Y0xX3JQr+cS8t9JMl7kty8qu68UgGq6pJTjcTtdez089Bl27lsksOXL9zdX8sYvfuCSf562TqH5Xf720yS1yb5UpJ7VdX1t1Duq079LO6M52XUxHt4Vf3OADhVtVdVHbow6djp5x9X1d4Ly50hYyCdlWoZLjU3Pt8WyrDUb+adquo3608DMT1sm69gmXV6zzfC5zJq/d5ksaxVddokW6qR+uSMmwXPnbpP+C1VdeapC4ed8c/TzydV1XmWz1w27SnTz2dX1blXWPb0VXWVnSwPALAJaJYOAOxxptDsQkn+t7u3OHhPRu3Nh2SEXw/v7qV+AF+QEQw+duH5Sm6bEYQ+p6ruk+SYjEDv4CSXyuir86pJvrOdRX9hxmBCT62qayT5QsYAPzdM8qqMpvLL3Stj5Ol/ncLKT2XUcrxFRpB5k5zaDDnd/cuqunlGn5avr6r3Z9QA/VlGTb4rTuufa5q2Kt39/aq6ZZJXJ/lgVb09o/ZfT3/nqhn9Jp5mWv74qnpZklsn+URVvSWjBuF1kvxiKuNllv2Zz2f0y3nrqvplkq9O2/+P7v5qdx9TVe9OcvUkH6qq/8loqn+j6fWvVKNzW9b6Pd/lps/A05I8NMnHq+rVGdcN10nyzemxfJ3nVtXlk9wzyZeq6s1JvpbkLBldKVw9I9C++06U6y1V9egk/5jks1X1moyBhw7KqIX8wYy+V9Pdb6+qByd5XJIvVNUbknwlo4/N82fUdH5vtq8WKACwiQk3AYA90VKtzX/f2kLdfWxVvS0j1LlRRhCXjCDxX5KcKcmnu/tjW1j/uCnwuXdGmHi7jCbSx2eMAP7PSf53ewvd3d+sqqslOTIjzLleRi27eyZ5W1YIN7v7M1N/nI/NGB3+mhkB580yRq++SU7tm3NpnU9V1aWT3D8jOL1TRgD6rSQfzxikZXHwpFWZAqhLJXng9FquljHi+TczAsLlze/vkuTL0+u8V0a3Aa/LqGX5O031u/vXVXWzjP31Z0nOmFET970ZQWcyXv8Tpp/3zgiM/z7JW5L8+Spe05q+5xvo4Rnh9V0z+p09PsnLkjwi0+j1y3X3varqjRkB5rUz+ub8QUbI+YQkL9rZQnX3Q6vqAxn9r94wyekzguKPZIT/i8v+U1W9b1r2jzPe4xMyAu9nJXnJzpYHANh41b2l/uoBANidVdWLM2oaXqS7P7/R5QEAgB2lz00AgN3Y1HflOVeYfq2MGpCfEWwCADBXmqUDAOze9kvy9ap6R0YT9l8luXhGU/uTM5p3AwDALGmWDgCwG5tGFn9qRl+bByc5XUZ/me9OcmR3f3zjSgcAADtHuAkAAAAAzJI+NwEAAACAWdLn5jqoqkpy7iQ/3uiyAAAAAMBMnTHJN3srTc+Fm+vj3EmO2+hCAAAAAMDMHZzkG1uaKdxcH0s1Ng+O2psAAAAAsKPOmFF5cKvZmnBzff24u0/c6EIAAAAAwJyMXh+3zYBCAAAAAMAszT7crKp7VdWxVfWLqjqmqq60lWXvWlXvqaofTo+3LV++hkdW1beq6ufTMn+w/q8EAAAAANgRsw43q+pWSZ6c5Igkl0vyySRvrqpzbGGVQ5O8NMk1klw1ydeTvKWqzrOwzN8nuU+Suye5cpKfTts8zXq8BgAAAABgdWorI6lvelV1TJIPd/ffTM/3yggs/7m7j9yO9fdO8sMkf9PdL6zRmP+bSZ7U3U+cljkgybeT3LG7X7ad5TpTkhOSHKDPTQAAAADYMdubr8225mZV7Zfk8knetjStu0+Znl91OzdzuiT7JvnB9PwCSc65bJsnJDlma9usqv2r6kxLj4zRnAAAAACAdTTbcDPJ2ZLsnVGrctG3MwLK7fFPGTU1l8LMpfV2dJuHZyTJS4/jtvPvAwAAAACrNOdwc6dU1YOT3DrJzbr7Fzu5ucclOWD
hcfBObg8AAAAA2IZ9NroAO+F7SX6d5KBl0w9KcvzWVqyqByZ5cJJrd/enFmYtrXdQkm8t2+YntrS97j4pyUkL299G0QEAAACAnTXbmpvdfXKSjya51tK0aUChayX5wJbWq6q/T/LQJId190eWzf5KRsC5uM0zZYyavsVtAgAAAAC73pxrbibJk5O8oKo+kuRDSe6X5PRJnpckVfXCJN/o7sOn5w9K8sgkt01ybFUt9aP5k+7+SXd3VT01yT9W1Rcyws5HZfTL+Zpd9aIAAAAAgG2bdbjZ3S+vqrNnBJbnzGg6flh3Lw0IdL4kpyysco8k+yX5z2WbOiLJI6bfH58RkD4ryYFJ3jttc2f75QQAAAAA1lB190aXYbczNWU/IckB3X3iRpcHAAAAAOZke/O12fa5CQAAAADs2YSbAAAAAMAsCTcBAAAAgFkSbgIAAAAAsyTcBAAAAABmSbgJAAAAAMyScBMAAAAAmCXhJgAAAAAwS8JNAAAAAGCWhJsAAAAAwCwJNwEAAACAWRJuAgAAAACzJNwEAAAAAGZpn40uAAAAABzy4Nf3RpdhMzr2yBvURpcBYDNTcxMAAAAAmCXhJgAAAAAwS8JNAAAAAGCWhJsAAAAAwCwJNwEAAACAWTJaOgBrykinW2a0UwAAgLWl5iYAAAAAMEvCTQAAAABgloSbAAAAAMAsCTcBAAAAgFkSbgIAAAAAsyTcBAAAAABmSbgJAAAAAMyScBMAAAAAmCXhJgAAAAAwS8JNAAAAAGCWhJsAAAAAwCwJNwEAAACAWRJuAgAAAACzJNwEAAAAAGZJuAkAAAAAzJJwEwAAAACYJeEmAAAAADBLwk0AAAAAYJaEmwAAAADALAk3AQAAAIBZEm4CAAAAALMk3AQAAAAAZkm4CQAAAADMknATAAAAAJgl4SYAAAAAMEvCTQAAAABgloSbAAAAAMAsCTcBAAAAgFkSbgIAAAAAsyTcBAAAAABmSbgJAAAAAMyScBMAAAAAmCXhJgAAAAAwS8JNAAAAAGCWhJsAAAAAwCwJNwEAAACAWRJuAgAAAACzJNwEAAAAAGZJuAkAAAAAzJJwEwAAAACYJeEmAAAAADBLwk0AAAAAYJaEmwAAAADALAk3AQAAAIBZEm4CAAAAALMk3AQAAAAAZkm4CQAAAADM0uzDzaq6V1UdW1W/qKpjqupKW1n24lV19LR8V9X9VljmEdO8xcfn1vVFAAAAAAA7bNbhZlXdKsmTkxyR5HJJPpnkzVV1ji2scrokX07y4CTHb2XT/5fkXAuPP16rMgMAAAAAa2OfjS7ATrp/kmd39/OSpKrunuQGSe6c5MjlC3f3h5N8eFr2d+Yv+FV3by38/C1VtX+S/RcmnXF71wUAAAAAVme2NTerar8kl0/ytqVp3X3K9PyqO7n5P6iqb1bVl6vqxVV1vm0sf3iSExYex+3k3wcAAAAAtmG24WaSsyXZO8m3l03/dpJz7sR2j0lyxySHJblHkgskeU9Vba025uOSHLDwOHgn/j4AAAAAsB3m3ix9zXX3Gxeefqqqjkny1SR/nuQ5W1jnpCQnLT2vqnUtIwAAAAAw75qb30vy6yQHLZt+ULY+WNAO6e4fJfl/SS64VtsEAAAAAHbebMPN7j45yUeTXGtpWlXtNT3/wFr9nao6Q5LfT/KttdomAAAAALDz5t4s/clJXlBVH0nyoST3S3L6JEujp78wyTe6+/Dp+X5JLjatu1+S81TVZZL8pLu/OC3zxCT/ldEU/dxJjsioIfrSXfOSAAAAAIDtMetws7tfXlVnT/LIjEGEPpHksO5eGmTofElOWVjl3Ek+vvD8gdPjXUkOnaYdnBFknjXJd5O8N8lVuvu76/MqAAAAAIDVmHW4mSTdfVSSo7Yw79Blz49NstXRfrr71mtVNgAAAABg/cy2z00AAAAAYM8m3AQAAAAAZkm4CQAAAADMknATAAAAAJgl4SYAAAAAMEvCTQAAAABgloSbAAAAAMA
sCTcBAAAAgFkSbgIAAAAAsyTcBAAAAABmSbgJAAAAAMyScBMAAAAAmCXhJgAAAAAwS8JNAAAAAGCWhJsAAAAAwCwJNwEAAACAWRJuAgAAAACzJNwEAAAAAGZJuAkAAAAAzJJwEwAAAACYJeEmAAAAADBLwk0AAAAAYJaEmwAAAADALAk3AQAAAIBZEm4CAAAAALMk3AQAAAAAZkm4CQAAAADMknATAAAAAJgl4SYAAAAAMEvCTQAAAABgloSbAAAAAMAsCTcBAAAAgFkSbgIAAAAAsyTcBAAAAABmSbgJAAAAAMyScBMAAAAAmCXhJgAAAAAwS8JNAAAAAGCWhJsAAAAAwCwJNwEAAACAWRJuAgAAAACzJNwEAAAAAGZJuAkAAAAAzJJwEwAAAACYJeEmAAAAADBLwk0AAAAAYJaEmwAAAADALAk3AQAAAIBZEm4CAAAAALMk3AQAAAAAZkm4CQAAAADMknATAAAAAJgl4SYAAAAAMEvCTQAAAABgloSbAAAAAMAsCTcBAAAAgFkSbgIAAAAAsyTcBAAAAABmSbgJAAAAAMyScBMAAAAAmCXhJgAAAAAwS8JNAAAAAGCWhJsAAAAAwCwJNwEAAACAWRJuAgAAAACzJNwEAAAAAGZJuAkAAAAAzNLsw82quldVHVtVv6iqY6rqSltZ9uJVdfS0fFfV/XZ2mwAAAADAxph1uFlVt0ry5CRHJLlckk8meXNVnWMLq5wuyZeTPDjJ8Wu0TQAAAABgA8w63Exy/yTP7u7ndfdnktw9yc+S3Hmlhbv7w939d939siQnrcU2k6Sq9q+qMy09kpxxJ14TAAAAALAdZhtuVtV+SS6f5G1L07r7lOn5VXfxNg9PcsLC47jV/H0AAAAAYPvNNtxMcrYkeyf59rLp305yzl28zcclOWDhcfAq/z4AAAAAsJ322egC7A66+6QsNHOvqg0sDQAAAADsGeZcc/N7SX6d5KBl0w/KFgYL2qBtAgAAAADrYLbhZnefnOSjSa61NK2q9pqef2CzbBMAAAAAWB9zb5b+5CQvqKqPJPlQkvslOX2S5yVJVb0wyTe6+/Dp+X5JLjatu1+S81TVZZL8pLu/uD3bBAAAAAA2h1mHm9398qo6e5JHZgz484kkh3X30oBA50tyysIq507y8YXnD5we70py6HZuEwAANp1DHvz63ugybFbHHnkDneIDwG5q1uFmknT3UUmO2sK8Q5c9PzbJNk9strZNAAAAAGBzmG2fmwAAAADAnk24CQAAAADMknATAAAAAJgl4SYAAAAAMEvCTQAAAABgloSbAAAAAMAsCTcBAAAAgFkSbgIAAAAAsyTcBAAAAABmSbgJAAAAAMyScBMAAAAAmCXhJgAAAAAwS8JNAAAAAGCWhJsAAAAAwCwJNwEAAACAWRJuAgAAAACzJNwEAAAAAGZJuAkAAAAAzJJwEwAAAACYJeEmAAAAADBLwk0AAAAAYJaEmwAAAADALAk3AQAAAIBZEm4CAAAAALMk3AQAAAAAZkm4CQAAAADMknATAAAAAJgl4SYAAAAAMEvCTQAAAABgloSbAAAAAMAsCTcBAAAAgFkSbgIAAAAAsyTcBAAAAABmSbgJAAAAAMyScBMAAAAAmCXhJgAAAAAwS8JNAAAAAGCWhJsAAAAAwCwJNwEAAACAWRJuAgAAAACzJNwEAAAAAGZJuAkAAAAAzJJwEwAAAACYJeEmAAAAADBLwk0AAAAAYJaEmwAAAADALO2z0QUA2NUOefDre6PLsFkde+QNaqPLAAAAANtLzU0AAAAAYJaEmwAAAADALAk3AQAAAIBZEm4CAAAAALMk3AQAAAAAZkm4CQAAAADMknATAAAAAJgl4SYAAAAAMEvCTQAAAABgloSbAAAAAMAsCTcBAAAAgFkSbgIAAAAAsyTcBAAAAABmSbgJAAAAAMyScBMAAAAAmCXhJgAAAAAwS8JNAAAAAGCWhJsAAAAAwCwJNwEAAACAWZp9uFl
V96qqY6vqF1V1TFVdaRvL/1lVfW5a/n+r6vrL5j+/qnrZ403r+yoAAAAAgB0163Czqm6V5MlJjkhyuSSfTPLmqjrHFpb/wyQvTfKcJJdN8pokr6mqSyxb9E1JzrXwuM16lB8AAAAAWL1Zh5tJ7p/k2d39vO7+TJK7J/lZkjtvYfn7JnlTdz+huz/b3Q9N8rEkf7NsuZO6+/iFxw/X7RUAAAAAAKsy23CzqvZLcvkkb1ua1t2nTM+vuoXVrrq4/OTNKyx/aFV9p6o+X1XPqKqzbqMs+1fVmZYeSc64I68FAAAAANhxsw03k5wtyd5Jvr1s+reTnHML65xzO5Z/U5LbJ7lWkgcl+ZMkb6yqvbdSlsOTnLDwOG47yg8AAAAA7IR9NroAm013v2zh6f9W1aeSfCnJoUnevoXVHpfR9+eSM0bACQAAAADralU1N6vqfFX1x8umXbqqXlhVL6+qm65J6bbue0l+neSgZdMPSnL8FtY5fgeXT3d/efpbF9zKMid194lLjyQ/3kbZAQAAAICdtNpm6U9P8oilJ1V1UJJ3JLl5kqsnObqqbr7TpduK7j45yUczmo8vlWOv6fkHtrDaBxaXn1xnK8unqg5OctYk39qZ8gIAAAAAa2u14eaVkrx14fntk5w2yaWTnCej+fYDd65o2+XJSe5aVXeoqosmeUaS0yd5XpJMNUkft7D805IcVlUPqKqLVNUjklwhyVHT8meoqidU1VWq6pCqulaS1yb5YsbAQwAAAADAJrHaPjfPkuQ7C89vmORd3f2lJKmqVyV57E6WbZu6++VVdfYkj8wYFOgTSQ7r7qVBg86X5JSF5d9fVbdN8uipfF9IctPu/vS0yK+TXCrJHZIcmOSbSd6S5KHdfdJ6vx4AAAAAYPutNtz8bpLzJ0lVHZjkKkkevGy7u2Swou4+KlPNyxXmHbrCtFcmeeUWlv95kuutZfkAAIDdwyEPfn1vdBk2o2OPvEFtdBkA2HOtNoB8W5L7VNWJGaOI75XkNQvzL5bk6ztVMgAAAACArVhtuPngJBdK8sQkJyd5YHd/JUmqav8kf57kJWtSQtgDqRWwZWoGAAAAAEtWFW5OfVr+UVUdkOTn08jlS5ZGLFdzEwAAAABYNzvVL2Z3n7DCtJ8n+eTObBcAAAAAYFv2Wu2KVXW+qnpmVX2+qn5YVVefpp+tqp5eVZddu2ICAAAAAPy2VdXcrKqLJXlPRjh6TJILLm2ru79XVX+c5PRJ7rJG5QQAAAAA+C2rbZb++CQ/SnKVJJ3kO8vmvz7JrVZfLAAAAACArVtts/SrJ3lGd383I9xc7mtJzrPqUgEAAAAAbMNqw829kvxsK/PPnuSkVW4bAAAAAGCbVhtufizJDVaaUVX7JLl1kg+utlAAAAAAANuy2nDzcUkOq6pnJLnENO2gqrp2krckuWiSI9egfAAAAAAAK1rVgELd/caqumOSpyW52zT5RUkqyYlJbt/d716TEgIAAAAArGC1o6Wnu/+jql6V5LpJLphRC/RLSd7c3T9eo/IBAAAAAKxo1eFmknT3T5O8eo3KAgAAAACw3VYVblbV+bZnue7+2mq2DwAAAACwLautuXlskt6O5fZe5fYBAAAAALZqteHmnfO74ebeSQ5Jcvsk30nyL6svFgAAAADA1q12tPTnb2leVf1TkmOSHLDKMgEAAAAAbNNea73BaZCh5yX527XeNgAAAADAkjUPNxe2e8512jYAAAAAwKr73FxRVZ0pydWT/F2Sj6/ltgEAAAAAFq0q3KyqU7Ll0dIrydeS3HO1hQIAAAAA2JbV1tx8ZH433OwkP0zypSRv6e5f7UzBAAAAAAC2ZrWjpT9ijcsBAAAAALBD1mtAIQAAAACAdbVdNTer6rmr2HZ3911WsR4AAAAAwDZtb7P0a2bLAwhtyY4uDwAAAACw3bYr3OzuQ9a5HAAAAAAAO0SfmwAAAADALAk3AQAAAIBZWnW4WVV/WlVvrarvV9WvqurXyx9
rWVAAAAAAgEWrCjer6hZJ/jvJQUleNm3npdPvP0/yqSSPXKMyAgAAAAD8jtXW3Dw8yYeSXDbJw6dpz+3u2yW5RJJzJfnKzhcPAAAAAGBlqw03L5bkZd396yS/mqbtmyTdfWySf03yoJ0uHQAAAADAFqw23PxZkpOTpLt/lOSkjNqaS76d5AI7VTIAAAAAgK1Ybbj5+Yzam0s+keQvq2qfqjpNktsm+dpOlg0AAAAAYItWG26+OslNqmr/6fljkhya5EdJvpvkakmO3NnCAQAAAABsyT6rWam7n5jkiQvP/7uqDk1y8yS/TvL67n7HWhQQAAAAAGAlqwo3V9Ld70nynrXaHgAAAADA1qyqWXpVvaKqbrbQLB0AAAAAYJdabZ+bf5Tk6CTfqar/qKobVtW+a1guAAAAAICtWm24eXDGAEIvSnKdJK9L8u2qek5VXbeq9l6j8gEAAAAArGhV4WYP7+7ueyU5d0bA+cokN0rypiTHV9Uz166YAAAAAAC/bbU1N3+ju0/p7rd3918nOVeSv06yX5K77uy2AQAAAAC2ZE1GS6+qcyX5syS3SnKVafL712LbAAAAAAArWXW4WVXnSHLLjEDzjzJqgX4oyQOTvKK7v7EmJQQAAAAAWMGqws2qenuSqyfZO8knkjwkycu7+9g1KxkAAAAAwFastubmOZIckRFofmENywMAAAAAsF1WFW529yXXuiAAAAAAADtip0dLBwAAAADYCMJNAAAAAGCWhJsAAAAAwCwJNwEAAACAWRJuAgAAAACztKrR0pdU1f5JLpfkHEne193fW5NSAQAAAABsw6prblbVfZJ8K8l7k7wqyaWm6Werqu9V1Z3XpogAAAAAAL9rVeFmVd0pyVOTvCnJXZLU0ryp9ub/JLn1GpQPAAAAAGBFq625+YAkr+3u2yb5rxXmfzTJxVddKgAAAACAbVhtuHnBJG/cyvwfJDnrKrcNAAAAALBNqw03f5TkbFuZf7Ekx69y2wAAAAAA27TacPMNSe5WVQcun1FVF09y1ySv24lyAQAAAABs1WrDzX9MsneSTyd5dJJOcoeqelGSjyT5TpJHrkkJAQAAAABWsKpws7u/meTyGaOl3ypjtPS/THKjJC9NcpVp1HQAAAAAgHWxz2pX7O7vJPmrJH9VVWfPCEq/292nrFXhAAAAAAC2ZNXh5qLu/u5abAcAAAAAYHutqll6VT26qj6xlfkfr6qHr7pUAAAAAADbsNqam7dM8uqtzH9DRl+cR6xy+wDACg558Ot7o8uwWR175A1qo8sAAADsWqsdLf18Sb60lflfSXL+VW57h1TVvarq2Kr6RVUdU1VX2sbyf1ZVn5uW/9+quv6y+VVVj6yqb1XVz6vqbVX1B+v7KgAAAACAHbXacPMn2Xp4eYEkv1jltrdbVd0qyZMzaoheLsknk7y5qs6xheX/MGM09+ckuWyS1yR5TVVdYmGxv09ynyR3T3LlJD+dtnmadXoZAAAAAMAqrDbcfGeSv66q8yyfUVXnTXK3JO/YiXJtr/sneXZ3P6+7P5MRSP4syZ23sPx9k7ypu5/Q3Z/t7ocm+ViSv0lGrc0k90vy6O5+bXd/Ksntk5w7yU23VIiq2r+qzrT0SHLGtXl5AAAAAMCWVPeOd91VVRdO8qEknVEL8v+mWZfICBYryVW6+7NrVM6VyrBfRpB5y+5+zcL0FyQ5sLtvssI6X0vy5O5+6sK0I5LctLsvXVW/l9Hc/rLd/YmFZd6V5BPdfd8tlOURSVYaQOmA7j5xx1/d5qfPt5Xp7w1g3ny/bdlafMfZv1vmHAJYb47BK1ur46/9uzL7d33t7ucPUwXCE7KNfG1VAwp19+er6mpJ/jnJ3y6b/e4k91nPYHNytiR7J/n2sunfTnKRLaxzzi0sf86F+dnGMit5XEbz+CVnTHLcVpYHAAAAAHbSakdLz9Rk+0+q6mxJfm+a/OXu/t6alGxGuvukJCctPR+t2wEAAACA9bTqcHPJFGZ
uRKD5vSS/TnLQsukHJTl+C+scv43lj1+Y9q1ly3xitQUFAAAAANbeqsPNqto7yfUyam2eOaOfzUXd3Y/aibJtVXefXFUfTXKtjFHPU1V7Tc+P2sJqH5jmP3Vh2nWm6UnylYyA81qZwsypff+VkzxjLcsPAAAAAOycVYWbVXWFJEcnOTi/G2ou6STrFm5OnpzkBVX1kYwBju6X5PRJnjeV84VJvtHdh0/LPy3Ju6rqAUlen+TWSa6QMbp7urur6qlJ/rGqvpARdj4qyTczBagAAAAAwOaw2pqb/5rktElumuQ93f2jtSrQjujul1fV2ZM8MmPAn08kOay7lwYEOl+SUxaWf39V3TbJo5M8NskXMkZK//TCZh+fEZA+K8mBSd47bfMX6/tqAAAAAIAdsdpw81JJHtLd/7WWhVmN7j4qW2iG3t2HrjDtlUleuZXtdZKHTQ8AAAAAYJPaa5XrHZctN0cHAAAAAFh3qw03/ynJXafBdgAAAAAAdrnVNks/Y5KfJPliVb0sydeT/HrZMt3dT9mZwgEAAAAAbMlqw80nLvz+N1tYppMINwEAAACAdbHacPMCa1oKAAAAAIAdtKpws7u/utYFAQAAAADYEautuZkkqarzJLl6knMkObq7j6uqvZMckOSE7l7eDycAAAAAwJpY1WjpNTw5yVeSvDjJk5NcaJp9hiTHJrn3WhQQAAAAAGAlqwo3k/xdkvtmDCx0nSS1NKO7T0jyqiS32OnSAQAAAABswWrDzbsmeWF3/0OST6ww/1M5tSYnAAAAAMCaW224ed4k79/K/J8mOdMqtw0AAAAAsE2rDTe/kxFwbsnlk3xtldsGAAAAANim1Yabr0py96r6vYVpnSRVdd0kd0zyyp0rGgAAAADAlq023Hx4km9l9Lf5woxg80FV9d4kb8zoc/Oxa1FAAAAAAICVrCrcnEZEv0qSxyc5T5JfJPmTJAcmOSLJ1br7Z2tURgAAAACA37HPalfs7p8nefT0AAAAAADYpVbbLB0AAAAAYEOtquZmVT13Oxbr7r7LarYPAAAAALAtq22Wfs1Mo6Mv2DvJuaaf303y050oFwAAAADAVq0q3OzuQ1aaXlX7JvnrJPdLcp1VlwoAAAAAYBvWtM/N7v5ldx+V5C1JjlrLbQMAAAAALFqvAYU+meTq67RtAAAAAIB1Czevk+Rn67RtAAAAAIBVj5b+sC3MOjCjxublkhy5yjIBAAAAAGzTakdLf8QWpv8wyZeS3D3Js1e5bQAAAACAbVrtaOnr1ZwdAAAAAGC7CCkBAAAAgFnarpqbVXW+1Wy8u7+2mvUAAAAAALZle5ulH5ukV7H9vVexDgAAAADANm1vuHmndS0FAAAAAMAO2t5w84dJPtLd31zPwgAAAAAAbK/tHVDo1UkOXXpSVV+uqhuvS4kAAAAAALbD9oabP05y4MLzQ5KcYa0LAwAAAACwvba3WfqHkjykqg5KcsI07fpVdc6trNPd/ZSdKh0AAAAAwBZsb7h5zyQvTPLQ6Xknue302JJOItwEAAAAANbFdoWb3f3FJH9YVadJco4kxya5X5LXrlvJAAAAAAC2YntrbiZJuvsXSb5WVUck+Z/u/ur6FAsAAAAAYOt2KNxc0t1HrHVBAAAAAAB2xKrCzSSpqosmuVOS30ty5iS1bJHu7mvtRNkAAAAAALZoVeFmVf1lkucl+WWSzyf54UqL7US5AAAAAAC2arU1Nx+R5ONJ/rS7v7d2xQEAAAAA2D57rXK9cyd5rmATAAAAANgoqw03P5URcAIAAAAAbIjVhpv3T3KXqvrDtSwMAAAAAMD2Wm2fmw9KckKS91TVZ5J8Lcmvly3T3X2TnSkcAAAAAMCWrDbcvFSSzgg1z5DkYiss06stFAAAAADAtqwq3OzuQ9a4HMzMsUfeoDa6DAAAAADs2Vbb5yYAAAAAwIbarpqbVXW+JOnury0+35al5QEAAAAA1tr2Nks/NklX1Wm7++Sl59ux3t6rLBc
AAAAAwFZtb7h554ww85fLngMAAAAAbIjtCje7+/lbew4AAAAAsKsZUAgAAAAAmCXhJgAAAAAwS8JNAAAAAGCWhJsAAAAAwCwJNwEAAACAWRJuAgAAAACzJNwEAAAAAGZJuAkAAAAAzJJwEwAAAACYJeEmAAAAADBLwk0AAAAAYJaEmwAAAADALAk3AQAAAIBZEm4CAAAAALMk3AQAAAAAZmm24WZVnaWqXlxVJ1bVj6rqOVV1hm2sc5qq+peq+n5V/aSqjq6qg5Yt0ys8br2+rwYAAAAA2FGzDTeTvDjJxZNcJ8kNk1w9ybO2sc5TktwoyZ8l+ZMk507yqhWWu1OScy08XrMmJQYAAAAA1sw+G12A1aiqiyY5LMkVu/sj07R7J3lDVT2wu7+5wjoHJLlLktt29/9M0+6U5LNVdZXu/uDC4j/q7uN3oDz7J9l/YdIZd/hFAQAAAAA7ZK41N6+aEUB+ZGHa25KckuTKW1jn8kn2nZZLknT355J8bdreon+pqu9V1Yeq6s5VVdsoz+FJTlh4HLfdrwQAAAAAWJW5hpvnTPKdxQnd/askP5jmbWmdk7v7R8umf3vZOg9L8ucZzd2PTvKvSe69jfI8LskBC4+Dt/kKAAAAAICdsqmapVfVkUketI3FLrqeZejuRy08/XhVnT7J3yV5+lbWOSnJSUvPt13REwAAAADYWZsq3EzypCTP38YyX05yfJJzLE6sqn2SnGWat5Ljk+xXVQcuq7150FbWSZJjkjy0qvafQkwAAAAAYBPYVOFmd383yXe3tVxVfSDJgVV1+e7+6DT5mhnN7I/ZwmofTfLLJNfKaG6eqrpwkvMl+cBW/txlkvxQsAkAAAAAm8umCje3V3d/tqrelOTZVXX3jIGCjkrysqWR0qvqPEnenuT23f2h7j6hqp6T5MlV9YMkJyb55yQfWBopvapulFGT84NJfpHR7+Y/JHnirn2FAAAAAMC2zDLcnNwuI9B8e8Yo6Ucnuc/C/H2TXDjJ6Ram/e3CsvsneXOSey7M/2WSeyV5SpJK8sUk90/y7HV5BQAAAADAqs023OzuHyS57VbmH5sRUC5O+0VGeHmvLazzpiRvWrtSAgAAAADrZa+NLgAAAAAAwGrMtuYmAMBaO/bIG9S2lwIAADYLNTcBAAAAgFkSbgIAAAAAsyTcBAAAAABmSbgJAAAAAMyScBMAAAAAmCXhJgAAAAAwS8JNAAAAAGCWhJsAAAAAwCwJNwEAAACAWRJuAgAAAACzJNwEAAAAAGZJuAkAAAAAzJJwEwAAAACYJeEmAAAAADBLwk0AAAAAYJaEmwAAAADALAk3AQAAAIBZEm4CAAAAALMk3AQAAAAAZkm4CQAAAADMknATAAAAAJgl4SYAAAAAMEvCTQAAAABgloSbAAAAAMAsCTcBAAAAgFkSbgIAAAAAsyTcBAAAAABmSbgJAAAAAMyScBMAAAAAmCXhJgAAAAAwS8JNAAAAAGCWhJsAAAAAwCwJNwEAAACAWdpnowsAAAAArK9jj7xBbXQZANaDmpsAAAAAwCwJNwEAAACAWRJuAgAAAACzJNwEAAAAAGZJuAkAAAAAzJJwEwAAAACYJeEmAAAAADBLwk0AAAAAYJaEmwAAAADALAk3AQAAAIBZEm4CAAAAALMk3AQAAAAAZkm4CQAAAADMknATAAAAAJgl4SYAAAAAMEvCTQAAAABgloSbAAAAAMAsCTcBAAAAgFkSbgIAAAAAsyTcBAAAAABmSbgJAAAAAMyScBMAAAAAmCXhJgAAAAAwS8JNAAAAAGCWhJsAAAAAwCwJNwEAAACAWRJuAgAAAACzJNwEAAAAAGZJuAkAAAAAzJJwEwAAAACYpdmGm1V1lqp6cVWdWFU/qqrnVNUZtrHO3arqndM6XVUHrsV2AQAAAIBdb7bhZpIXJ7l4kuskuWGSqyd51jbWOV2SNyV57BpvFwAAAADYxfbZ6AKsRlVdNMlhSa7Y3R+Zpt07yRuq6oH
d/c2V1uvup07LHrqW2wUAAAAAdr251ty8apIfLQWQk7clOSXJlXf1dqtq/6o609IjyRl3ogwAAAAAwHaYa7h5ziTfWZzQ3b9K8oNp3q7e7uFJTlh4HLcTZQAAAAAAtsOmCjer6shpoJ+tPS6y0eVcweOSHLDwOHhjiwMAAAAAu7/N1ufmk5I8fxvLfDnJ8UnOsTixqvZJcpZp3mqtarvdfVKSkxbW2YkiAAAAAADbY1OFm9393STf3dZyVfWBJAdW1eW7+6PT5Gtm1EQ9ZieKsF7bBQAAAADW2KZqlr69uvuzSd6U5NlVdaWq+qMkRyV52dKI5lV1nqr6XFVdaWm9qjpnVV0myQWnSZesqstU1Vm2d7sAAAAAwOYwy3Bzcrskn0vy9iRvSPLeJHdbmL9vkgsnOd3CtLsn+XiSZ0/P3z09v/EObBcAAAAA2AQ2VbP0HdHdP0hy263MPzZJLZv2iCSP2JntAgAAAACbw5xrbgIAAAAAezDhJgAAAAAwS8JNAAAAAGCWhJsAAAAAwCwJNwEAAACAWRJuAgAAAACzJNwEAAAAAGZJuAkAAAAAzJJwEwAAAACYJeEmAAAAADBLwk0AAAAAYJaEmwAAAADALAk3AQAAAIBZEm4CAAAAALMk3AQAAAAAZkm4CQAAAADMknATAAAAAJgl4SYAAAAAMEvCTQAAAABgloSbAAAAAMAs7bPRBQAAYM9w7JE3qI0uAwAAuxc1NwEAAACAWRJuAgAAAACzJNwEAAAAAGZJuAkAAAAAzJJwEwAAAACYJeEmAAAAADBLwk0AAAAAYJaEmwAAAADALAk3AQAAAIBZEm4CAAAAALMk3AQAAAAAZkm4CQAAAADMknATAAAAAJgl4SYAAAAAMEvCTQAAAABgloSbAAAAAMAsCTcBAAAAgFkSbgIAAAAAsyTcBAAAAABmSbgJAAAAAMyScBMAAAAAmCXhJgAAAAAwS8JNAAAAAGCWhJsAAAAAwCwJNwEAAACAWRJuAgAAAACzJNwEAAAAAGZJuAkAAAAAzJJwEwAAAACYJeEmAAAAADBLwk0AAAAAYJaEmwAAAADALAk3AQAAAIBZEm4CAAAAALMk3AQAAAAAZkm4CQAAAADMknATAAAAAJgl4SYAAAAAMEvCTQAAAABgloSbAAAAAMAsCTcBAAAAgFkSbgIAAAAAsyTcBAAAAABmSbgJAAAAAMyScBMAAAAAmCXhJgAAAAAwS7MNN6vqLFX14qo6sap+VFXPqaozbGOdu1XVO6d1uqoOXGGZY6d5i48Hr9sLAQAAAABWZbbhZpIXJ7l4kuskuWGSqyd51jbWOV2SNyV57DaWe1iScy08/nmnSgoAAAAArLl9NroAq1FVF01yWJIrdvdHpmn3TvKGqnpgd39zpfW6+6nTsodu40/8uLuP34Hy7J9k/4VJZ9zedQEAAACA1Zlrzc2rJvnRUrA5eVuSU5JceQ22/+Cq+n5Vfbyq/q6qthUCH57khIXHcWtQBgAAAABgK2ZZczPJOZN8Z3FCd/+qqn4wzdsZT0/ysSQ/SPKHSR6X0TT9/ltZ53FJnrzw/IwRcAIAAADAutpU4WZVHZnkQdtY7KLrWYbuXgwpP1VVJyf5t6o6vLtP2sI6JyX5zbyqWs8iAgAAAADZZOFmkiclef42lvlykuOTnGNx4tR0/CzTvLV0TMZ+OiTJ59d42wAAAMDMHXvkDdRygg2yqcLN7v5uku9ua7mq+kCSA6vq8t390WnyNTP6ED1mjYt1mYy+PL+zjeUAAAAAgF1oU4Wb26u7P1tVb0ry7Kq6e5J9kxyV5GVLI6VX1XmSvD3J7bv7Q9O0c2b0yXnBaVOXrKofJ/lad/+gqq6aMSDRO5L8OGPgoqckeVF3/3DXvUIAAAAAYFvmOlp6ktwuyecyAsw3JHlvkrstzN83yYWTnG5h2t2TfDzJs6fn756e33h6flKSWyd5V5L/S/KQjHBzcbsAAAAAwCYwy5qbSdLdP0hy263
MPzZJLZv2iCSP2Mo6H0tylTUpIAAAAACwruZccxMAAAAA2IMJNwEAAACAWRJuAgAAAACzJNwEAAAAAGZJuAkAAAAAzJJwEwAAAACYJeEmAAAAADBLwk0AAAAAYJaEmwAAAADALAk3AQAAAIBZEm4CAAAAALMk3AQAAAAAZkm4CQAAAADMknATAAAAAJgl4SYAAAAAMEvCTQAAAABgloSbAAAAAMAsCTcBAAAAgFkSbgIAAAAAsyTcBAAAAABmSbgJAAAAAMyScBMAAAAAmCXhJgAAAAAwS8JNAAAAAGCWhJsAAAAAwCwJNwEAAACAWRJuAgAAAACzJNwEAAAAAGZJuAkAAAAAzFJ190aXYbdTVWdKckKSA7r7xI0uDwAAAADMyfbma2puAgAAAACzJNwEAAAAAGZJuAkAAAAAzJJwEwAAAACYJeEmAAAAADBLwk0AAAAAYJaEmwAAAADALAk3AQAAAIBZEm4CAAAAALMk3AQAAAAAZkm4CQAAAADMknATAAAAAJgl4SYAAAAAMEvCTQAAAABgloSbAAAAAMAsCTcBAAAAgFkSbgIAAAAAsyTcBAAAAABmSbgJAAAAAMyScBMAAAAAmCXhJgAAAAAwS8JNAAAAAGCWhJsAAAAAwCwJNwEAAACAWRJuAgAAAACzJNwEAAAAAGZpn40uwG7ujFW10WUAAAAAgLk54/YsJNxcH0s7/7gNLQUAAAAAzNsZk5y4pZnV3buwLHuGGtU1z53kxxtdlj3AGTNC5INjf68H+3d92b/ry/5df/bx+rJ/15f9u77s3/Vl/64v+3d92b/ry/5dX/bvrnfGJN/srQSYam6ug2mHf2Ojy7EnWGj2/+Pu3mKKz+rYv+vL/l1f9u/6s4/Xl/27vuzf9WX/ri/7d33Zv+vL/l1f9u/6sn83xDb3swGFAAAAAIBZEm4CAAAAALMk3GTuTkpyxPSTtWf/ri/7d33Zv+vPPl5f9u/6sn/Xl/27vuzf9WX/ri/7d33Zv+vL/t2EDCgEAAAAAMySmpsAAAAAwCwJNwEAAACAWRJuAgAAAACzJNwEAAAAAGZJuAkAAAAAzJJwEwAAAACYJeEmu62qum9VXWWjy7Enq6ra6DIAAMBmVVWuyQF2UnX3RpcB1lxVXSLJ+5J8Pcmrkzyju7+5saXac1TVObr7O9Pveyc5pR1stqqqqrt76edGlweAbXPMBnZUVe3V3adMlQDOvnTODMDquUvEbqm7P53krkmOTXKnJK+sqrtW1T4bWrA9x0eq6n1V9fvd/esptNt7owu12SzeqV8ebLqLD6w1x5Wdt9gioar2E2zuHrQ02THO6XZOd58y/fqIJP9RVadLHKNhRyz9v1TVXlV1xao6v2P5ns0BlN1Wd78iyZ8neXySfZM8LMnRVXXYhhZsN1dVZ0nyH0nOkeQLVfX0qtqnu389zXfcmSyd3FbVX1bV45McVVV/sWyeL+kZWBZ4+IyvwtI+tP/Wz8Jx5Y5VdfuqOs1Gl2mGlj6nf5bkGVV1rd+a6Zi96S1cEO9XVedPxg3GjS3V5rawz86WJEvndOy0SnKdJA9Ifiv0BLbfPyR5RZJbbOZjufOD9adZOrutpSYf0++HJLlXkhtlBJ1vTXLUVMOTNVZV+ya5UpLbJblNxsnb/bv7udP8Svbci4mpxsPe3X1yVd0jyZOTnJzkJ0kOTHJckvt19xsXlte0fxNa6E5gqYnZjZPcJMm5k7wjyTuTfKq7f7GR5dysqmrvhRsfZ8g4LPx0g4u1W5puMv2qqm6Y5DlJPp/kuit9NjW1XtnS57Wqfi+j65v3JDm8u7+0wUVjBywctx+W5FpJ/q27X7J8/saVcHNZ+H47Q5KXJTljkpt29w83uGi7hap6dJL7Jzk8yVFJOuO7cLf+DC4cTw9Icrkkf5Tk9Elem+Sr3f2tDS0gm9rC5+eiST6c5BlJntbdx03z901yuu4+YQPKttgSb/8kZ/F53jWEm+zWlu40L4ScV09yjyR
/mORHGXd5ntnd39+oMu5ulh3Qz5zkBkn+PsklMr58HtDd753m/yaA3hNU1d2TvK27vzg9P0OS/5fkTUkemhG8Xy/J7ZNcNSOEv+fShbMLrs1p4cLv8knen+THSb6c5LJJjk/ygiT/2d2f2LhSbm5V9aQkh2aEwm/O2GefcPG8NpYdl7+c8Tl9ZHf/v2naAUnOlRFafKK7f7lhhZ2BqnpDkgMybkJ9eJp22iS3SPLtJO/r7p/tad9xc7BwQXxoRojyb0kesxEXwHOx8B33nCRXSfL87n7CsmWcn+yghf16/owbTpfMqHn23g0u2i5VVS9NcliSU5KclOScGf+XD+nuH2xk2dj8qurVSc6e5K+6+3NTmHiJJE9Isn+STyZ57FLouQvKs/R/vX+SOyb5qyRnTvLTjM/1y3yu14+mX+y2phOtUxYvLLr73d19myQPSfK9jKDzFVV1p40q5+5mqg2x1LfpjTP6PN07yZeSXCzJu6vqVVV1roXQebfvu6mqDk7ytCSfrqrHTPvo1xkh2Cu7+xvdfWx3/1vGF+E/JrlQks9X1TOmCzIXDptEVR1WVXevqnMvHGMelVGT60+7+0oZ79/7M5rLPL+q7rnUBJLfauZ4jyR/m1F7+R1Jrpnk9UmeWFVXWeqLjJ1XVXdOcpokz10INq+V5H+SfCYj7LmHplNbVlWXzahh9MIkn56mXSNj370gI5x/dVUdINjcfBaaUz854wbis7r7hKrat6ouNn3fPqGqLr6Bxdw0Fi7UL53kthm1o/5lmrdfVd26qp6e5L5V9fsbWdYZ6iTp7q9mnC9/Jsl/LnV1sTt30bJ03l9V901ywySPzThnunZGwLl3xjkybNF0bXWhJB/q7s9Nk++a5JVJLpjxObpzRkvCXe2pSR6XEWq+bvp5VJKrL55j7c7/5xvBzmS3s3CQOG1VXaGq/qmqHlqjf7HLJUl3vyjJLZP8c0bti2dW1XU3qMi7lelE+FdVdckkz05yTEbY8wdJ/jTjTto1kxxXVYcne0zfTd9Jcpckb0zydxlh7+0zjsMnJUlN/d9192cz9tNfJvnXJH89Lcvm8fCM9+aJVXX9qjpnxnv8vqWaXN39le6+VUYQckrGSc0zpovB3T7Q35Y+daTYW2Tsm+tnfOavk+SJSW6W5NVJHlBVFyoDwq3awo2Rcyf5VZJvJUlV3Twj5DlDxr7/TJIjk1x0A4q5aS37fz1jRijx1e7+eVVdJePG1YUyamn8dUZT5zvv6nKysuUXj1V11STnz/g+XupS4PZJ/isjwLtnkndU1R/uynJuRgsB/R0yBul811Qr+aAkj0ny4iS3yjiO/KML9S1bCPTOlfx210zd/bOMZuknJTm8qg7cnW+OTLWn9804Xr4gozbw9zO+93+a5F+XalNX1ZOq6hwbV1o2sW8m+XmSC1bVIVV12ySPTPJ/SS7T3VfJuA69Xu2CPsYXbgZdKuOa77FJrtHd98+oVPXhjK6qemq9p5/dNeYLiN3OwkHisUnekFEj6PAkz0/ywqp6yFTb6ofd/U8ZJ7RHdPdbNqTAu5mF/X+fjOZ5L+rur03z3pvkiGleJXlMVZ1cVTfYkMLuQt198hSq3yOj4/jvJnlSkitkNMdJd/+iqvap0TfeL6f99fAk1+7u521U2VnR9TJqgN8gowbXPTMG0fpN/4VTTaC9uvsD3X25jLDjsIz3c08I9LdqCjb3zgjafj4dk3/d3Z/PqAV73SRvz6j5+rokt96wws5MVZ2jqs6+wqyvJTk4yfWr6m+SvCjj4uAvuvvFGZ/pX2aEoHu0qjpTVV0z+c2F+NI581cyurV5fFU9JMnRSU5Icrfu/o+M2oBfyjgesAlMF5t7L9SW+UWS007zuqpun+TRSb6a0ffftTK6ibnORpR3k/p+krNldLWSJE/JOCY/Jcl5M/qMvENGCx1WMB1HTpvkfVX1uap6VlU9vqquV6PfwP/NqMV4pSQvWgpBd2Nnz3QO0N3frarzZFy
vPT3JZ5Pf1JS/S8b/JPzGUgvNjJtSN8xofv6iJP+d5MHd/YOqOmPGcX3fjM/aulq4Bv6zjPOAt0zfMX88lfGJGedhSfLgqnqF1klrS7jJbmXhruiNMkKkF2b0D3RIRo2KfTPCtQdX1ZmSpLs/092P3Yjy7k5WaMb4vYwaLkv9RS4FPT/r7hdmnAh/KqPZ7o92ZVk3Qk26+/ju/ueMk7V/zWiW/sCqeklVHdTdv5pqvu43Lf/D7v6fjS09y3X3id39uCSXyWhC/Y8ZweW9lmr7TAH1KVW13/T8+RknV/dN9tymKHVqX8jd3b9K8pYkJy4uM+27jyS5e0bNzs4YbIvt899J3lxV11k8cZ6Ovc/NqBn++Ix9f5/u/vD0/XmBjHDzVxtQ5s3mAUneVlXPr6rfX7hoOT7j//1X0zJfSXLXhRukByXZJ+M7kA1UVReuqidW1RmmGydLNeW+mDHY29Or6t0Z/R2+Jcl9e/RxfVySLyQ5+wrnNnuqj2Qcg99cVR/OOC4/OcmDuvvkjJsk3834/LPMwvf9eTO6X3lvRmB3y4zj9fsyAr2nZezL62cMhLq0/u74OfxBktMlOev0/DEZ/3v/0d0nTdMunXEsdTzltywdz7v7iIz/pRdmXFvdsbv/b1psaaCqd3f3T3fheff3k5ypuz81PX96xrXC26ZrvNNmZBP7ZpwvsEYMKMRuqarennExfMfuPm4KiXo6qD09o5bV/bv7qVW1bxs8YdWm/k6+vXwfVtWfJXl5xkXgk5ZOVKpqvx6jhN8ro/nXnZb6ftsTLH0WF55fM6P28M0zPrOP7+7HLMz/zWjSbF7TXdkHZ1yQfCCjT7K3d/e3p/l7Jdln+uwbZCTJdAy4acYNqNNnNE97Va8wwFtVnaWnDtiX/w/x26bm+3+eEaJfNqPZ6FOTfK67T5ouki+SUTvzvQvH5otmXFgf0N1X3oiybyZVdbWMmmmHZfRT+pyMQQl+Mc2/WMYF98+6+yfTtAtkHAdumeRcU+jDBqmqf8y4of1/GaPoPmdh3tUy+gQ/eJr/j93902nedTNqAD2ou5+3Jx9zFl97Vd0iowXCmTOCzddN32kHJLlfxjH8Em2wjN9Spw5idcmM2ltPn4KWyqk1Xa+ZEQxfJKNFzykZ4cc/dPeRG1DsdbVwXfagJH+T5N8zrhf+ortfPi1z/ozvpIO7+wobV1o2o+m4c9okJy+cH/7m/LqqrpRxA/KqSS4w/Q/ukmN5Vd00Y9DiP03yBxnnYNdMcsxUjstmtCh9W3c/YL3LsycRbrLbqaoDM6qo/7S7D5umVZK9pgPKfkneneQ03X2ZDSvobmBqMvO+JP+R5Mju/vnCvAMzmutdKck/ZYwW/blp3tKJ8G2SXGHpwnB3tuzk9vcz+lz58jTvDElukhFyXiujtutDu/sVG1ZgdtgUKi3193NQRrj/H0k+uHTRvKdb+D/404zaKl9O8tGMkdJPl7G/XprkI0shEqtTox/Y2yR5YMaIoU/LOJk+bvnJfVWdN6Pf06smuVF3H+PGSlJjtNMbZIScf5RR0+gx3f2yhWX27e5fTss+N2NAjMO7+7kbUWZOVaNrhptm/B9cJqPZ4iO7+x3T/DN294+XrXOVJA/LuBjeI/ue3VIAUKf2J7fPVOt+afptMsLOZ3f3wxw7huk8+DIZgcbPq+qjGWHMdZJ8c4Xj8NL345kyApE7ZTRvvX13v3mXFn4dLHx+FgPziyV5SZJLZXRRc+8kb8po+fWITDf/d4fXz85Z+P84b0YNzXtkdDHy3Yya9w9Z+FydPsnHMs597tfdr1l+3Frnsh6QMbjggRnXA6/MaCXzi+nc7D4ZLRgP6e7jVXpYO8JNdktVdXTGRdrVuvtLU3O7U5Lf9K/0jIw7KNfpqT9IdswUGB+U0VHzMd3959PF3Zm7+/hpmUMyLqavnlGb7V1JPp5TLxaf2t0P3t0P6gsndAdkNEf6TsaX7eeWneT9XkaNn1tl1Lj6KxfIm8/
CCdb5M/qFfNniBXJVnTlj0Kj7ZXS58JyMGi4f3oDibkpV9d9JfpLRn+ZxGbWn7p9R8+frGYORvabH4Fqs0vTd9wcZzRvvkjEgyOOSvGGphuwUyt8jyd0yRo7+5z25ptqSxYCmqi6R0Y/3dTIult6U5NHd/f5p/l4ZN6juneTNPfrzZgMt+269UEbAeYsk58m4sXLEwg3GpYD6fBlBy7kzWpW8a1deEG8WC7XqrpNRi/ACSV6VcQ530uL5WlXdLqNW0v9196GL6+/ygm8yNQauem5Gq5zXZXzH3SzJGxdDvmWf1cWaZwdlnDt/IclNdpcbflOrjfd29yen5/tldJVy74z+i3+a0VT9R0n+rbsfsTElZTOqqrcmuWTGuBpfzbgBcLEkh/XC+BlV9SdJ0t3v2sXlW7rmu1CS52XkEW9P8pok38gYzf2KGTW4H+1m0NoSbrJbWThRuHnGXZLXZ5ygfn9hmbNlnIhdNskld+dQbVepqtNOd6Wfn+ScGfv3A33qSId3zWiqd/6Mvn5PTPKK7r7bNH+3PhFe+KJ7QcYX2sO6+z8X5p83yQ/61CZxV84IgB++O++Xuauq/8qozfXwHv2oLp9/kYwBoW6V5DndfdddXMRNZSEUPmtGjYxfdPffLVvm8hn9bl03Y1TJf8sYRdVxeifU6N/pikkelNHM+i0Zo6IfM9UkOEvGiOnf7NEf1G59w2l7LIVaVXXjjPD3Ykn+MyPsuVCS/TKanR2xcEPvvEm+u7uEEHO3/NyiRvcht8voPmSvJM/KaHXyy2n+H2WEoG/t7tduQJE33MJx+o8yRpPfN6P/uHMn+WBGCPXW7v5JjdGub5vkfEmO7u7PuFA/VY0Rvm+UcePoihkDidy+u9+9jfUWA87HZZxDXKO7v7rORV53VXWTJK/OqBjx3IxmuV+Z5l08Y1/tkxFsPj/Jsa3rsD3ewnXULTNa+NyxT+2+4NMZYzj8bXd/e6p48Mvu/ubC+ut+nTlV+jljd59YVaeZzq3OlvGZvk9OHWTwqxndtR21q8q2JxFustuqqvtkjH5Z08/XZfTr9ucZ/bv8bXf/28aVcP4WLv6WToYfmjHa7o8zTkqOTvKhhZO0QzOa9f0kyVendXbrE+GFwP1iGWHN3yd5wXRhcN6ML71bZDRdOLK7n75xpWVbFj7rd824yLtHd790Yf7BSX6d5Ffd/d1p2vWTfH6qRb5Hh0Y1umD4z4wg7f3d/ffT9L0yjTE0Pb9FRh9c/9bdD96o8u5Opn18QEYfUIdn1Oh8bkY/1J93cr2yqvpKxsB3j16qSVxV18uoCXvLjJrHT81oibDH/m9vZou1L6vqNBm1bG+V5GoZg7ccuXQcnwK7X/ey5rN7mqp6X5KfZ9yI+kZGhYD7JvnjjHO7J3b3h6Zl9V0/mW4kHdzdX1iY9siMG0snJjlTkn/O6F/9OwvniPtlBKBf6O7vTOudOaOW+H7dfdld/VrWS1VdO8lTklw8yWszrhfe3d0/nOYv1mTdY/8H+V1V9ZyMc5fbdffXq+ruGd+/18joAqqr6qiMa81HrfdxaeE6+GIZ13fXT/L/MjKHtyf5+PRdsldGt2PHZdwA/d60/h59TbAehJvsVpZqEE6/H5DR19IdM2pX7ZMROpyYcYf5bhtUzN1ajb6Cnp7RT87nM2pevamn/jb3VFV1l4yBDW7Roz+782V8Id84o4ncOZJcOMnNtnVXn401naT8X0YXA4+YLlDOnRFSH5lR4+C/Mvrd++GGFXQTmmolf2Bh0l8neXF3/2yav9gU+PQZg7W0E8AdN31O/yDJ2TNqhn9mYd55k/xlxo2+AzO6yXjWRpRzM6uqa2T0m/U33f2sZSHZaZM8MaNW548z+o+9qlqbm9diCDfdiLrl9Lh4Rv/hT9jVTRg3k4Wg7RwZtZL/c7F2UUZT4VtkXMSfNeMG1DN6jDBPkqp6UUbQcuvufs807boZgzCdkHFd8pcZ5wmPSvLvUzhymYybKI/r7kc
tbO/aSb7Y3cfuulexPqbvpCxUePibjO4+OiPgfFXGTU9BOSuqqqdndCl30elG1dcyBk38hx4tCM+V0XLzM0nutas+S1X14YzuO96VMQDhn2aMn/DMJP/d3Z9fWFZgv44MPc+sLdwxuVxGH44XqqqfZFT3/niSF1TVuzJGH/z9JGfLCJL2mNG519NCLbazZHT+fUJ3/yjJHavqqUn+NaOT+RtOd9vevnRHeg/0hYyQ4dpV9YOML7zzZnQw/a81+mZ6e5ILZgx4xeZ14Yyah19b+DwfkXHR998ZneLfZ/p5xIaUcJPq7mOS7FVVD8m4sHtqkktW1Uu6+4MLwebevTAIk2Bz+6xQi+D2C/PellFb6G1TjYcnZ3xeH5rxWeV3fS/J3hmtPjLt270yBij8eVU9LMnNM47ZbxBsbh4LzRj/IKOblysm+VZVfSkjtDsuyVOr6h0Zfbb9ZZJnVdVF9sQLz4Vgc68kl0vyw+mxeDH+vap6VpK3ZtRcPjxjQI9/3KBib0b/nVNrBKeqLpzkHQuh+sczzvXunHGOfIeqemNG3/QnZrQ0+80+7+637fqXsD4WQs19u/uX3X1UVT07Y7C7e2dURHlFVb2xu/93I8vKpvWxJLeqqgtm3Bw/Ockz+9QBbS+acW314h79KO+K5ujXzOh27Y4ZlXl+VVWXyhik8Qk59Rr4rd397T3x+2VXUnOT2VpWw+dzSc6VMRDFARkdxr86407O51dY112TnbSs2cjLMi7+/q7HIDmL/QXdKiPgPCDJ2zJqs+1xg4RMNfuemTEq9P5JTkpyh4wL4pOq6ooZAxk8bammBJtTjT4j/y+jps/jMgYOun7GCdY/TMt8MCPQvr1jzcqmmlNHZdRe/mJGE+lXqgW086ZaBGdP8oKMi+wLZQQ8v5/RfPrvFpbdv7tP2pCCbhLT8fnAJJ/r3x4s5XQZYcWFM/pqfE8vdKNSo2+vF2X0u/s/u7TQbNHCjdeLJnl5Rn+pX884T+wkn03yz939vIV1/jTjBu37azfvLmdrpmae/zo9fXOSu/RC33ULy+2XMcL1/01Bv/PqZWr0If2+jJt4/5nkowvnzRfM6Fv69kmulNHS6cHd/draDQexqqrLJvlSd584Pd8ryd4Loe+jMsLyvaKPcrZgOm98d8ZNx/NmtD551hQoXiijQsEfd/d5p+XX5bi07Br4uhmttv68u7+4rIXAzTP+/8+W5H8yrpP3uGvgXWmvjS4A7IiqOk9VHT598S8Fm/fL6MPmdkkunXGh/PcZd54/XlWPnZqo/4YTsDWxV/Kbzs6vktGU5HPJuDtbY5TedPfLu/s8GSHGdTLdzd7TTBcHt8mo4fDXSf6ou189BZtny/jcHpRxIcYm1mOAsr/LCKqPyWhm9qAkT0p+E3j8OMn+jjVDVd20qr5YVVdYmtbdx3X3TZNcM+Pu+2OTPKeq7rZ0/GD7Tc1GU2P04otk1Ap/eI++pR+SUTPteUkeUFWPW1p+Tw82J69O8t4kl1ic2KO7hH/LCIqfklFj5HwLi1wtyWWSnG7XFJPtsRBMPjuj38g/z2h6ft4k/5Rxs/UJVXXPhf+DN3b3+5etv1urqrvW6Av9N8ePjAvwv83oPuR6SR45hQa/pbtP7u6PLNWY8l03LH13Tfvz/2XUcv37jGPv30yhZrr7ixkDWi11i3CTngax2g2DzRsl+WiSB1fVpabw55SpZt2+02KvyTgG3yWjT1L4HVON+5tkNDv/ZUbLiTtV1f2TvCzJtTP6Bl5qybKmx6Xl56Y1xve4bcb4Hl+byvjL6eZPuvtV3X2+JI/PqJ39jbUsD79LzU1mpar+PuPuyGczRpw+uqrukXECdoc+dXTu/ZNcMuNi7s5Jvp3R+fkzN6bku5eF5kvnzDh5e0rG/v1xLesbr367j7LTdffPdse70otWulNYVft198krza+qv80Iy57Z3Y/ctaVlRyx89s+QESBdIMl3euqnrar2yTjReWZG/6pv3JNrAS2
pqjskeWRGuPDSJPfu7h8sW+ZuGfvtpd19u11fyt1DVT0ho5uWP+nuLy+rRXDejBqyl0lyue7++saVdPOYau3dLaPmxS+r6koZA3ssNcs9NKPp5MUy+sX7RkZAdmiSj3X31Tag2GxFVV0lo7XI/XtZf7JVdYmMEXfPkeRavQf2CV6jj8ePZQRK1+tTm3Uu9Xd8iYzm+nfJaHr+xIzBEL+360s7b9NNvX9NcoWMsPO5Sf6np0EHd3dTa5fHJfmLjADo6Rmtlo5dWOYaGQHQLXs3GBWetVVVhyz7vFw2o2LBLTK+l3+ScTx7ene/aheVad8kb8gYKCgZfX8+vLu/PM2vJPsuXPvtP1Vo2a2vgTeacJNZmWpEXTfjZOtKGaORfTrJpbr7xgt34Jeqih84LffAJBeZ7p6wRqYaQk9OcqfufsOyeUv9XV0+ySlJPtl7SL95CwHYH2eMyHrxjLvWH07y5qUQflr27hknekd39202pMBst6o6YPH9W2H+vTJq5n6tu2+460q2uU3H5stk1KC6S0Yw9KjufvSy5fZLcpruPnH5jRK2T1U9MOMi8bzd/Y1p2t4ZX42nTEHe65NcpafRjjlVVd0iY0CCp2TUBPl0nzpQwZ8luWtGE7OfZAyA8YzeDQb72N3UGIjlNRnnJ6+cjkGV8X/QU+D0oYymwI/fwKJumKr684ymwS+t0Xf9FZO8bKGiwFkyArm7ZgQJn8roE/LNU61mFixrqnqG7v7Jsvl/kdHC44CM7ixelOSYxWB5d7LCjfzLZISc18uoIfxvGTWET86o3XqnJBeaWsewB6tT+w8/LMn9MwLMkzJugL+ku78xnS+ePqNlxd5Jju1TBxVe0+boC+H73bv7owvT98roluphGcfK1yR5TpL39RiDYun8qwSau4Zwk1mqqksmuVlGrczzZnTC/Zfd/d/T/N+qKTXVVqnu/tpGlHd3NV08vCXJjbr79Yu1E6f5+2ZcHB6b5CG9Bwy2sPCFfJWMC9/9knwiyWUzmiq/K6Oj67csrHODJJ9YCiLYPOrUvtsul1Ej84pJTptxUfLvixd4VXWRJC9M8oMkd+0xaItam8tqbyf544x+xm6T5KtJ7tndb1pYXt9tO6HGYELHZPT19sDu/vSy+bfLuKi8WXe/dQOKuKlNN0UfkRHCfyejieRru/srC8sckuT73f3jDSgi26GqLpBxQ/FtSe7Wp/b1t3Tzcanvttcl+ds9/ZhTVW/KaDb5siSvWHZMPm+SP8nUpU6Sh3b3YzakoJvYwvnCnTL64n5Sd79weU2tqvqnjKb/J2bUCH/M7nQjb6Fyw/4Z/TxfOMlx3f3haf7NMkLyi2b0NXpgkrNk7Aetl/ZwC8fovTO6MvtRkg9mDOR53STfzQgaX5bRT/K6H7unFnb/lPFd8S+LLWKm+WdIcs8kD54mvWAq38cXr4tZf8JNZmXZXdH9My6Sb5ZRLf00SR7R3U9bWjZjRNM9OlhYTzVGg3tPRvOS2yxM33dq2vcHSZ6f0eH83TaomBuiqj6RMdLuEd39nhoDK700I+D8cUbzhVd290c2rpRszcIJ+ukyAuoDMmqK/yxjcJZPJDmyu18xLX9gxkn88d391T255uEKNTb2TfLrPnWgsYMyjttLg2d9JOMG1e8MAMf2m2oR7JNRO+Zvk7wzoxbBe6fP5BUyRke/WHf/wYYVdAZqDEbzmIwaa+/PCCHe1ysMrsLmMp3/7ZcRTP9VRl/XT+qF/mWnrgZemeTR3f20Pf2mSlWdI2NAl9tl3KB7XUYXIR+f5u+TMTDZzZK8aDqe7NH7bNFCsHmejOax70zyuO7+xBaWP3dG+PHG7n7c7rAvF86Zln4+OaMSymky/h/flXGdttSNz+0yzgO+l9G66182quxsPjW6Kvr7jHPDD9QYP+MiSR6Q8bl5X8Z39PuW15Jep/L8YU/9Mk83g967Quuj82WcY905yecyxlF4jsoru45wk1laFnIelOQaGf1r/mlG+PD
3S3ed9+SAYVeoqodm1HJ5XZInLBz4z5gxyMqDkly6uz+zu78XC3cbb5jRdOJePXUQX1Wfzghwnj09zp+pmXrGftNcYZNZOEF/RpLDkvzNVEP5khl3kb+f5OCM0ZSPWGyqsqeawrWlQcUumXGh/IilWtsr3O3++2mZc2UMgPOyDSj2bqmq/irjxP+sGbVjTsioKXNykr/o7rcur1HE76rRjP+xGX0QviJjEJCPL9UEZHNZdn64X0Zfh3dO8smMrga+mXHcvmeSc/U6j6o7B8tq118u45zuahkX56/MuBH79Wn+ft198p68v1aycP738ozainfp7k9OQfsBGd1j/TTJ/yZ52/KWTHPfn1V1lu7+wcJ++MOM/lyfnRH0njejW5rLZYS6D+3uL21YgdmUFls71Rhp/L5JbrAYXk7Xl9fOuBlziYx+bO/S69Af8PT/+5t+M6dpF86omXmBjNZHj1m61ltY5g8zxgj54yTn6zEQEruA0dKZpYUT1327+9vTBfH9ktwno3/HN1TV0VV18d05TNtIdeqIcc+fHtdK8oqqem1VPSrJ25PcPaNz590+2Ex+a7TQi2ecxH4p+U3IcIGMvtnel+TeSX6d5NJJzi5c2JymgO68Gf1DPSejlnIy7iR/NaNZ9VFJbpjkvVX18qo67YYUdoNNTXLSYwTUpf/zW2cMlPWBqvrLaf4vazjNtMwPM2rCXk+wuWOmk+6l3y9TVXepqutW1ZWn4+2/Z9QkfmTGfj45oxbBb5qjO/acqqpOW1WXrqqbVtUlqup00358Y0bf3X+b0TT37RmDD7FJLN1UOfXpGIF5uiB9eMYF8q8yzlXekjGgy0lJ7jitsOaj6s7J4nGguz/W3TfO6GdzvyT/kOTpVXX7GoNCnjwtt8fur5VMgd7vZYQZr84Y+DRJbpwx6Mg/ZNTo+teMc78kpx7H57w/p3Pc79Vourv0vXSTjFDzsd390iRPzeja58EZ++iTVXVEVZ1p15eYzWbhxvhSsPmMjOPzfhmtUX6ju3/c3a/O+N96ZJIzr0ewOXl8ksOnmtZLf//zGee3S301P6+qXjXd0F9a5v1Jrpnkit193LLvKNaRmpvMxkItqgtkNAm9YpJ9M2pQHTXNq2n6jZPcK+OAePbld0jZcctqQ+yfESKnTx2F9zZJ7pDkghmDLRyX5BlJnjUFGrt9uLmkqv4oyfW7+yHT849l3K2/T3efUFUXz9g3f5nkGwKGzauqrpUx4NM/dPdrq+pCGbVZ7tijL619M2on/CTJL7r7BhtY3A0x1Y56WcYF3H8kOXm60DtPRpPeP09yyYyay0dMAf9SU/W7Zoygev2eOl9n+9Sp/fvePaMZ+gHTrM9mDBj0ip66vZjC5JMzDaYyTZt1TaG1sLAPL5URPNxiYfa7M47Tb+xT+2s8KGNfH93dr9/lBWarpprgV89oGfHGjOaAn5/ODQ/JqMV8xSRfSfLR3kNGq16uTm1CfdaMGpoXzWiJ8Jz+7f7q98pofXPXJOfJGFn+vRtR5jmYApAPJ/nX7n5MVV0pyUsybnbfJ8mXM74H39jdd9ywgq6xqrpaxvHzGkm+mOQeSc6U0Zz4z5Yte9qM84HbZ/Rr/MMkF+2tDNTI7q2qTt/dP114fuaMc8qrJDljRg3II1dqLTFVtNm/u3+21i1Rpmvd52QEmR/P6HPzrUlOXDiPukJGRYcb5dSBwh7lfHbjCDeZhYUTsfNmXLRdPMm3Mu7onC3J8Un+rrtfPC1/uoyahL/shU7RWZ2F/X/2jCDiHhkdOv9fktf3qU2v903ye9O8X/WyDvw3pvS73nQhddrpy/bcSf4ryce6+67T/OtnDOjxFz31PcTmVGPQiSOT3Le7v19V/5zk0CQ37u6vTCdWH8zoT/WZ63GCtZlNn/VLZDT5/J/uvvY0/XQ9DbZUVZdIcquMvtrOmTHAx1MzLqrvk+Q93X3bPe04sRamGrPfTPKmJP+S5JcZF5lXyrjIPDqjSelXtrg
RUlUfzxis4N8zbswdnPGZvWSSf+vuv9nA4rEVC+cnd8n4Xv1iRoh0pYwRdJ+ZETZ9fQOLuSlV1SszatidlOR0GQHnkZm+yxaWO1/GDT2DvWzFFNy9NsmVM0YDv3ZGmPnI7n7HVEvx6IwBUv6iF/qAnbuqOn3GzaH7Zgyg+d2MAdlu1t1frN8d6PXMGWHouVpfm3us6bh9myQPm2o7Lk2/QJI/zOhS5BoZ/1ePzbgptcsqykyf66tm1Lz+k4zR0J+Q0Ufs0sjse2V0i3fbjNqaP8q4UfRE57W7nnCTWamq/87ot+XR3f3KGv1eXDOjxuCVkjw5ox+Xn29gMXdbVfWqjND4mIy7rVfNaILy9iT/0tNIiNOyv9W33p5s+txeKKPPpX0ymjReoLsvvKEFY4dMQebjM/rfvNR0QX3hjBHSj+7ux291A7u5qjp4an5zeEaI+YLu/tjC/GtlXEjfMKMWVTJquRzW3T/ck2p3r5WpZtBRSe7RC32+VtVNMgZRuUDGYDivygg5f7rihvZASxcdNQa1eEaS23X3f03z9ssIOP8qoxnlv2f0uWvU002qxiB+H84I93+Q8Z17x4zucY7PCO1eu6fW1lyy8Lm/QUbz6Ydl9Bv9BxnH51tnBMQP6WX9yE3rO05vRVUdktF1zR9l1KK/f5JvTfv88hnnC//V3Q/eHYOPqjpnRk3fm2c0v39JkvstNRteHnKyZ6sx6NT9kly7u/+nqs628FnZN+M4fqOMSjVnzfiuPqq7v7qLy3nWjJZID0py7oybZs9K8pWFFoxnnpa5e0YLpqvtyjIyCDeZjaq6WJIPJHl0kqf2bw9KcdmME9c/SvJH3f3JjSnl7mehVsT1My6Q75fR1PyUqvpkkrNn1KA9IaOD5X9vo8n+lqnG8VszLpZPk1Ez6C7d/fYNLRjbtPxCrqruk1Hr8IEZF9L3zDiZuXB3f213vFjZlqUbGdPd684YdOUWGc14npPkTd395WnZ02bUvN8ro7nRp7r7uy54tt+ymvTXz6jNcOUpWD7t4s29qnpAxkXB+ZNcQ5PS31VVj80Iwa4xNWFeHNDgnBk1Yq+Z5AptAIxNpU7trujMGbXnX9vdz1iYv1+Sy2ccr2+Uccx+mO/epKoelFGz8E49DXZRVWfLqFF/r4zP/Osymlju8YPl7Yg6ddCl/ZdqZ9boj/OBGbXUztvdP9ldg+KpRcelkvxNRm22nyR5eHc/c2H+Xr7zSZKq+v3u/tLU6vJ1Gf0iv2ThuHSGjFYUfzk9vp0RLh7Vu7DbuYXuTe6S8dn+UUYtzqOTfGfpf3mq9PDz6ZrAue0uJtxkNmr0U/jBJA/o7mdNF9K1cBFyUJKvZwxg88ANLOpuqar+K2Pgjwd197FVddskz8s4Of69jLtpP8oIOe/b3W/ZqLJuRtOX4h9n7KtPCODnaQrnnpLRPcPpknwjyVO6+8lOYk5VVVfNqQMnvCFjII939vp1+r7Hqap3ZVxAnpDkXj31ATl9N+69UJvg3Enu0N2P27DCbmILNywu1t2fm6btneSUqbbVtTOa/V+7u9+5YQXltyzUQDxPxk3XyyV5WXc/e/lNpqk58LUz3ucndffTNqLMG23hxsilklwsya27+6YrLHdIRjPLByY5X5KztT4Rf8vCvjxzxmfrKhnXIC9J8v1lTbD3yegD9pIZ4fqz9oTua6aad9fKqL167Ywbnn/X3f+zoQVjU1i6IT4dxyvjGP66jJvfH8rod/O1fWoXR2fPqYP7XTbJOTeiheAUwl4s4/h4y4xs4nEZXSz9Tr+g7FrCTWZjCi8/nNER/K27+1vT9KUBAc6cMZrxRzJqxQkZ1sh0cXx0ks90912maV9I8r6M5pA/r6qnZTRD+U6SmyzdcYPNbkdrW07HovMnOVeSzy8EIntcrc3kt2pP/c7rr6o7ZzTlP23GYEMvTfIBzXt3XlVdL6PFwqUzmuEenuTVC0269kl+eyTk3bWm0M6o0bfXMRn9NN5reS2
1GoPl/XuSm3f3mzegiGzFdIz59+npp5Lcprs/u4Vlz77ULH0PPl6fPqM/xNNk3Jy7VXe/fwoXaqH20b4Zx5Yzd/db3bxbWVW9PKOlwokZ/faekFGr7LlJjp2Cm4smuVOSL3T3szessBtkurlw6yT3zmi58dYkN21diO3xVmgdtXdGtw73mCa9KclLu/sdC8tcMOMG7ufX8ybB4nfEFMSeYTG8nI6l18w497p8RjcfT+xpIEc2hnCT2Zgu1B6W0Y/YURl9XXx+oXbKlZO8MskLu/sfN6ygu6HppPc2Sb7Z3e+sqhtl9Bt02yRvncLlRyY5S0Y/TSe4iIY9S1U9MGMgsacuNcubpldGf8j3zQiQXpkxYMUu7TNpd7LspPvOSZ6U/9/eXYfLUZ9tHP8+SUhwd3enaIEXaYEWKFLc3Yu3OBS3Foq7u0txt+Luxd2Ku4eE3O8fz2/JsJxAEpIze87en+vKlZyd2c0vmz1zZp55JMv8LyaPzbdV3n8HJQahUQFCDgvYHXiGrEi4StLrkVOAdwWmlXskt6SIGJvMmluNDKA8DewH3NiZJYtdRQk0rQMsTvaPfhbYttGyopq13PS8tgwGd6SSNbwocCN5/LgOGIvM5NqUHPR2EHnD6ZPIycv9S7ZnW54fR7Zo2oUMmK9d93qsdUTElsB/GjemIod57kO2fXqXDBxeIOmZTlpP46Z9H2A5clbCOOTwtZOBK8r3dU9gfHJg5hFkG48DOmON1jEHN61lVU4exgE+rlzIHUFO2H0cuAh4DhiBTA+fFpjcdwOHvXIRSDnYL0/211xF0i3l/+gfZMnNgj4Btq4gIpYlL0YuVjeaWtqZKpnzmwD7A09JWrxsa74jPyWZ0bIEOVDLwc0hNKiL4sjegoeRvfI+JAN0lziD4Md+LkATEeuT/bPGIS+m3gNmJCdIr1fNHLHWUzJwf0dmh81JVpscBDzmc5KfiohZyeFuG5HnzmeRN6ffLtt9U+QXlIDM6mTbj9fKjbwRyCyuncgBTbcBR5HB9rYfslmuJXq5esMaIudmPEImLu3RlB25IBnknAt4ipyafszw/l6qBDePJrOu3yHPC8Yis48fI28K3VP27wlMoYH95X0zqCYOblpLqvSymZwMmo1GNj3/uGxfkezhsmDlafcCh6iD6Y42bEXEjOSE9CfI/kKzkQHnbSSd6pNia3UlE/wb8uL3n5WePkP02a3chPkt8J6kN4bPiltP5d/ehyxvPB/4lwY2gZ+ILN8HeE7Sp+Xx6SW90K7ZK0OjcqLdi2xovwLwFfAC8IakF8t+05EDcP5IBjmXk3R/LYtuMU3Zrr8j+5V+AryrMmCmXKDsCPyWbKXwHHCepEfrWbU1a/p/7EVmgTVKzXuSU79XJifWjg6cRA6eaJtj86A0X3CX0vP5yKzXdYFe5E2qIxyI61jl+mROYA5gbUl/7GC/0cns2L3Ic+Tx5Z7TZh2KiN3IjPvLyZuJ3zZtXxc4FLhGpT3acFxL43xrDrId3n5kr+avI4cFLUz2/ZyejE2cOzzXY0PGwU1raRFxPTAucJSkczs4MfstMBPwBvCEpE9qWmpbKSfEO5DT4kYHvidLbzaqdWFmgykitiYzK1aS9EjJJphApZfvEL7WeGRD8X0lnT2Ml9ryImJXMpCwhqT7S/bKimQW3FTk3e69JJ36My9jP6Nysr03WfI4FjAA6E32pNoVeEEDB+ytQA5uWHAQL9l2KkGJrcnm/6MAAl4ie26eKunOsu+Pps5ba2gK8q9BZtSMR05jPgG4TNJXkQMfZifLr7cgJzXvX9e661R5zwKYlCyhfAf4SmVIUGTP+t+T79dKZNbyTI0bUvZjJVP+TfKz9wWZ/XqlOuj9V8prp5F0h2/omQ1aRGxAnjc+RA6v/W9UempGDvTsJemLzvheiohDgFWBZSU9VXm8F1klcCZ5Y/+Pkr4anmuxwdej7gWYNWuUP0fEn4BFyOmWFzQ2V/eV9JCksyXd7sBm55H
UT9JB5F3pTciD/N/gh8wJs1b3FVl+Okv5+jjg8XIhMljKxSLAgWSQ6dZhusKuY1TyBser5evNyIzYT8gL5QeA4yJi+nqW17WVoNyAiJif7Av5bzIQMQ4ZhOgDfF4CdyMDSLqiEdgsJ+Jtr7w/owEHkBNZf0P2aryfrAI5KSIOiYipGoHNxvmItZyDgaOBMckL4T5kWfUS5aL3a0n3kVmIy5DHo+oxuy2UhIBGAOBYcuDmQ+X3/SJiyYgYWdInkq4gK6K2B46X9Gm7vV9DoCd5Q+9M8lp6f+DPETFi846S3pJ0R/mzA5vW9hrXiRHRp1SbNFwA7Elmkx8QEROXtke9Ivu4f0PeyOqs76VPgAnIXvGUdfSQ1F/SbeQMkPnIDE5rET5ps5ZTOWD9GXgLeLhclFSnOEb5fbGaltnWIqJH+f94VtLFkv5L3r3G5ejWRTxCBoZ2i4iDyQygIyol1b94UVdKsucCNgb2JgcItKO3yAzNjSJiTzJQ/CiwWrlgPouBwWQbQpVj6u7AncDRyl6aCwOTkH1M3y/7LB/Z27T6/OEySbSLmoosNT9S0lPlBun6ZJ/Gl8jBeedFxHYR0cfBiNZRyUCcnvz/OgKYv5QovkZOSn+67NMI8r8r6XpJ/dq0B1ojWeBAsjfkTWTP4yvJypsjgJ1KiTWS3pB0JHnDDpoSCixJ+kbS5eSNkq3Jn2+XAmdHxFwOCpsNWuWc5hjgyYj4d0SsDswAnEJe/88PXBIRk5Vg4nflucP9GF65Ifw4eeNst1LN0b/8fBmhbP+a/N6fcHivyQafg5vWyr4Axpb0fPXBSp+3SYGDI8IT9zqZpAHl/yAqj7XbRYN1YZKeJAMZn5Hl6X0BImLMsl2DeYFyOHAXObylXb8HTiZLQvcAdgNOB3aW9Eo5SRwb6EeeCNpQiIgxyBYgb0h6uTx8NNmf6j8leDMBmZ21ijMOB6pkiUxCXjBNAnxaHhsJQNJ1ZJ/GA8hy/4OBhWpYrg1CJdC8DplJc1X53M9DtsE4lIHZ4ztFxHkRMUrl+W11fC7nyt9HxIRkAO44YCtJt5AZSR+QLUP2Bg6PiM0bWVSN99rB/Y41zg2Uw0POA9Yj38cFgNuB/SNiikG+gFmbK8fmycmqpwWBU4FzyAqoxcny9DmASyNi0U5aU6PycO2IWI6cK3Ez2YZtt8ghbJSfO+OQrU8GkNVJ1iJcqmS1i4gxGn1/mvwXGCsi/gKcqTLNuHKCOh3Zd8yf45o0XyyUi+uxgOfb7ULCupaSBfRgRGxDnpj0IgMbv4mIc4FbG8ec5t4+lRssq5EBkCUlfVHDP6MllDvZ25NZQOOVctCG2cmedw9JeqKWBXYDkj6LiP5kCwAiYgtysNBGlZ+fM5LZw187KDFQJUvkRPIi6mvyoulFSd+UC5oeJTPkxIi4juyx1a5tJlrdR8AolePJMcAtQCNDsw8wDdlTtW3PDyvnYGuR/SFvkPR5REwN7AxsKumMiDiSHAj5e+A0sqevVcTAfr3jkMeQeSMH5l0M3CHp2Yg4nJyMvj45bGTziJhF0nv1rdysNSl7I+9LtjT6kgxmTkaWea9EDuH6hhzutzrwn05Y0/flOvYMYH9JV0XEWsDZ5M37pSPibrJK4M/AH8hBQx+HB+m2jLb9oW+tISLmA86MiL8DtzQFCO4mS412Az6JiBuBL8vBZyry7v3I5J0eG8aGsoTrcuBF8uT4u2G/KrNhowTkepABoj3Jk5e/kRcli5PlZRdIerjaDkNF5ECBA8njz511/BvqUCkNHZk8+ZyZzAB6VdLTZGlvY99ZyJ7JE5PTeId4Gr39KLh+JXBoROxEDhA6nDzJbgwEWZocFnJ+eawdy3B/zvFkwGsR8vt7WnKI0AfA96XUbIByqvbx9S3TfsHbwIQRsSAZxJybPGZ/WrbPQPZSvncQN87bRvkZ9y0ZEH6uPHwg8BQDgwXnkYHNm8isw5/c0GtnjQzY8uW5ZHDzOzL7dUPg3IjYpnz
W7o2Il8gg51QObJoN1HxOIum+iNgZuLD8Wrm0e9g1In4DrEL2xj6pPL8zjktTA/cB15c1fgQsExFrkK2BtgBGIAcJHSppv/I8Hy9bhIObVrcgT7wuAK6OiEOBxyR9J+n1iFiGvJN8HnAtcE9EfEMGNmclS2x8QPmVKnelRwemJftW9R3M5zay2FYg77Ad2OiNYtbKSpDuKqCnpK+BHSLiBLK0dzvgTxFxGnC5pFebAkX7khfWf5bUr9MXX4NKYHMUMqi7NDltuifwdkQcCxxVsqd6A8sD/cmp3a+W5zuwOeQan7vzgP8jyx8HAM+VzMNRgb+Qw92OUA4CcRC5iaTrgesjYj3g7+XXIhFxBtlWoh84AN8F3EyWCx5Lthg4D3ignMOMS5aoz0pm/7R1oK4cr+8GXpL0Sck2nI38mfZa2W1E8jh9lKS3G8+rZcGtqQd582MfYF5yivMJkf227yGDnNWAzfvARTFwOGrbfv7Mqhrn0BHxO/Im1XvlpvhsEXE+cHpE/Au4Qtk66slGdWe51hwu30eVa+CpyJsX0wNvlG19JPWVdCFwYUTMS37PfyHp3bKPv8dbSPimvtWtlHmsBOxCTiU7gWwo/Fq5SJ4OWIMMaE5HXtQ9CZwg6ZR6Vt09RcQpZCbWUZIuHoLn9SKzAu4DNnZw01pV9c5xKUcdnQxufhgRvVSGr0TEUsBRZLD/CbKE7+HK6yxKZgid3C4nNZXg5plk9tspZEnoGmTJ7+bAGZUg0bjAiKoMaXIm4a8TOUxlTzJwDJk9MAIwHtmDcN2yn99rBv0+RPYw3R3YgAxMXEd+dtsmC7srqlyEzkWWDs4GXANcBrxOHoMWITNyd2/HQHXlPZqfDGp+WDl2jwjcS164r0j2u9uUnPb9W0kvDfqV21e5gfQ4WZ10kKSPIuIgYF3gj6UsPch+0/+S9Gx9qzVrXRGxEdlf8wXgGeBBsnJqSmBLYEzgH5LurWFtjwBzklUAu0k6qbKtt69tuwYHN60llJOCqciTrC3JEpp/ARc3elmQF3CzkD043pb0aU3L7VYqJ8IbAYcB/yBT7Ts8ODQFhxpZm7uSZZILKyenm7Wkyud9MXIIwCLkFO87gDPJUsa3K/vvDOxIlph91fkrbg2V7/XpgYfJ7MHjJfUt2a3zklmsr5ULwSXIYJsndQ+hymd0AnJAxehkZtX1kj4u+yxDvsfjkFU4Z5LZa5+0Y0CnWSWYE8AY5I3RLyQ917TfLMA+wB/J93F5Sbd19nqtY03nGyNK+rZxkRkRE5ODcrYkv0cgW2QcARxcjldtGeQvWYNvkAGElUo2d5BZiKeTfTjPJNtYLEBmSm3qDKSOlYzXO8mkisNL0sVT5HnvsSUR4//ILOIdlKW1ZtakHLcnJ8+/G62NRiJvMH5BJjK9T1ZmXtHJa5uZ7P25VHloH+AcSa+W7T3IBNS2+5nSlTi4aS0lsufVHGQwYWXgfjLY9h9J39S4tG6tnPS+TPZg2lXSB4N7khs5ifNFMjC6n0+MrVVVgkbTkNkrfcnevpAnWTMC/wZ2bGQbluf1ktS/mtnZriInSB4LbCDptoj4PXncWAO4tASU/koGHZaR9EKNy+1ymrKHbwAWI4Nun5E9qM+RdPggntuWgZyOVILxW5A3Tachb5DeBewp6cGm/VcENpS0XOev1po1AnHleD012Q/5z8DzwFXAnZKeKvuOQgb63wTekfS/8njbBurKufQ25BCMIzWwL1xj+z+AzYDPyTL/vylbXLTte/ZzImJMcsjp1ZK2jIgrgUnJmyFvlc/rZuSMgLUl3VPfas26hnLTYDIyW3IjskXGyOTP6xckzVjTuhYFjiOvCW4hbwhdrzbv4dxVOLhpLaH5hKqUjP2e7Ik1N9lo+HCyF6TTwoexiJibvGv2L0mHNW1rZMDMBUxf+o5ULx7PJKfbLSbpnc5eu9mQioiryRYYf1U2NB8ZmIi8obIr2QtoWbLMsWe7BzSryrHiHrIU7+6IeJw
MKqxXsgZ7k+/hyuTE6TfrW23XEBEjAfNJur3y2BZkBsH+5M+/lYAlyb7GL5K9ja8u+zqboKJyE2NB4FYy4+pKsgT3MLIM/SxgX0mv17dSaxYRIzXfyI7sGfkb4AGgD7AQGWg6ibzgfLWyrwP8RQlwHkFmtm4r6dim7SMDo8t9436io89R5ODTLchjxy7k8JOryrbpyWBIH0m/6+z1mnUlgzrWRA4ZnpY8z7lJ0nXDsxKlcn07AZlBOhuZkf20pPci4i9k7KEX2Wf+QuA2HydbW4+6F2Dtq9zpBAY2L4/s3Ui5O3It2RNoB7J09AbgoMihNzZsfUKW7vWGjv9vyGDPauWHACWwOQtZWnCoA5vWFUTE5OSF8l1krx8kfS3pZXJC8p7kSc7KSg5sFuW48BbwCvD3iNiS7Dt6APBl2W1mMrD5qKQ3q8cSG6Q9gdsi4qRS7ggwBZlFfIqk1yUdQfYTPIQ80T4rIi6IiNkkDXBAZ6DKhdBBZFbxtpKOA14C+gGXkOcWD0TE1j6naA0lSH90RPy5ZMoREQuRx5QNybYXvwMWBr4nM8iPjYhVImJsGDiwot2VgEA/YFuyn93uEbFKY1vJEP+6EdgEDxGqqrRCWDwi1inXJmeTP//+DrxXtk9Yzin2BeYnr1ca/bzNrAPNx5rG94ukBySdJ+lvkq4rjw2vwGbPEticjOylexN5M+gW4KGI+CdwMdnP/Fwys/QGsgWTtTBnblptKpl/CwGrk/00HyHvzt/cSP8umUBTAdsDi0iaoa41d3WlBODdDu5IjwZcT/ZfWqoEepqfdwwwtqTFKo/PQp7QXaCcNm3W0kqW3IvAjZI2Lo/96M5wRDwBvENmHjq42SQiliCHeUwA3C1pkfJ4Y9jNisB0kt5xNtAvi4ilycDl/5E9p44lbzbNJGm1iOgDfFe54J6XbAOwBvAVMHMJZFgR2f/uLLJ39+nlIuYp4GnyXGJVMiMD8nt9FrmPd60iYhHgNjJwdB5wETlc4p/AWpJeaGrbsBH5/9uLPH/ZR9Lznb/y+g0i07DRTmU2MutobOBPkp6pZZEtrHI9MgEwIfBk+for4EhywMlX5Vi8M3kMGQn4mvyMvkr23jzCP/PMhk4dmfcRcR15A+04cijuWGRm9gJkldI6kl4v511/k7RWZ67PhpyDm1aLyknX/OSUy97kJMI5yYu7O4DzJd1Yec4oZMnHxzUsuVuIiKfJxvvza2BfqsZJ3Srk3alHyJO3+yoZtbsA+wGrSrqqGgwK9yG0LqRkB11M9mhbo3F3uFLKOgpZejImsKSD9j9Vyhk3ADYhb0p9ADwHzFP+fLikE4ZnOVF3U0pI1wc2Jvs8fU5OQv9Do0w3KtM6S6bDSsCnkm72e/1jEbE4cCKwfmmfsDrZN2tJSXeXfe4mf949Kem0+lZrDRExFXmDZAPgSTKLZnFgrso5R/X7IMhsm62BCSR9VMe6W0Vkr7i+wIPV87KImITMTuoBbC7pYR8zfiqyX/RmZDB4ZnLQ2J8kPVm5bukDTAysCYxLtq+5hOz3Kgc3rZ3FMG6TMzyvMSNiCrK641BJxzdtWxc4AXgMWFHSh5VtPna2MAc3rVaR/do+JHtf3VUuQC4gA5xfUO7eS3q0vlV2DyVoszHwO0mN8qSJ9ePJ0OuTDeinIXtwfkKe4E0HXCdpjU5fuNkwFhELkMeZ74GjgH+rDBAqF4dnApdI2rGOO8mtonLjYzZyeMI4wP2SXirb5wBWIEvTJyKz4s6U9FD1+XWsvSupXgyXLPktydL+GcnJ9NupDKgoQc0eztT8eeV9WlfSmeXr88gJrauVjOKJgEvJc4wT/DltLRGxMJmxuUB56HTyPPHNsr0H0KsS5Bxd0uftfNEZEWuTQbkPgJ7k0Lz/AU+QFVGrA2sDV0naqq51trLIYYNHk9OS+5PtsXYdnIxg/7yzdlf9Hvg1QcnKuecPN7KGh9LO5ETgQkmXlZtlP/TZj2y9dCxZOXDh8FqHDVs
Oblqnqxy0liUPKltJurJse4q8mDul/JoCeIjsc3GoMwR/nZIdNFK5CFifLC39O1lO82VpATAfeWL3J2BKcjrp6WQA6ON2vniw7iMiliLvyk5OZm89Rl7M/Bn4llLq265ZGJVM1nnI8tCpgG+A78gMoO00sHXIT4aA2JApJ9VRCXLORQ6vWIH8XP4bOLgS3BmuJ/1dTQwcDDA+MIOku5q2HwOsIGmy8vViZKXC3xsBUGstJUC9OrA7MBMZiD6TvMHyVdmnF5kl1PbnJOWzPzvZU3osslf9mOSxewB5LjdX2f0MYGdJHzko91MR8RIwdfnyGrLf5q2SPqnsMzr5uXzRFWVmEBFXkVnjB1QeG6prxtKm5EpgHkkvDrNFDnz9zcgYxNfkTaHtgW+r2dcRMS0ZgzhJ0q7Deg02fDi4abUppc4bkYM7noqITcgsqsUkPRARfyAvor8HzpC0fY3L7dJKhtWbjZKtciE9L3nxvDLZc2xPSReV7SNK+jYiJgS+aFxImHV1zRdyEbEn2X9vPPJC8Cwy+/B+B/IhIh4is4AOIi+Qfw+sBYwM7Cfpn5V9HXAbxiJiZbL8f37gfTIgf7zf545FxCnAomQ5/+uVi5QVyADxHWSG8bJkH1P38G4hlZvf1QygsYCdgL8BnwGnkf+XT/r4/MP7NSXweXOQrZRd9iUzYGck+0T+mazO2VHSSZ285JbWyNwC/gq8Rt7g3x0YgQyuX0D2mf4+IpYBDgPWlPRYLQs2axHl5so5wB/IoZPbqLSWKzeqNCSJAqVtTH+y9/2Xv7T/UKx3YfLnyjzAKMD2ampPExELklWMB0v6x7Begw0fDm5abcpBY2lJu5evHwX+S042/SxyWM0JwLrA/5y1OXQiYlayd9UJwPnAA5WU+3HJIRZbAkuSzfx3a5SVmnVX8ePBFGMAowFfOwNjoHLBfD0ZxLygPDY6sCDZH3JVcpDC3yRdU9c6u7JKhuw4ZFbV9GSG7KuS/lP2abQUWQdoHM8XdbbsT0XEWmSlwU1k0KGR4TcC2UtvbTK77VrgMEkP1LVWG7SIGKuaJVcemxHYn7wh+ygZaDpPlYnf7aRy7JiO/MyPQw7dfP+XsjEj4nhgU7KXnI/dHagEjicgp6FvRgY8zyRL/9clK6HmrG2RZi0kIiYl+9RuTibQ3AJsJum1sv1nS9Ur33MbACeR5zn3Dsf1jgqsUlnvzcChwEdkwsNfgdmAaSX1dZZ71+DgptWm3CEdSdLXETExcDXwqKRNy/alyYPbOpLuqHGpXVop2zqVvKh7EziZ7Ln0TGWfKckBK9uS/fNOBvZv14sGaw/NpcD248wpsi3IYcCJyqE11YDwROQxY0Pgd8AOko6obeFdUCWrcDTgRjI7cwA59ONjsmfeAZIeLPtPAewKvCLpEJ9odyxyON7JZMbFDpLeK4/3IAfqjQB86eBw64iBw1rmIQe1LECWVt9C3nS9p/L/uATZj3NOYCpJr9e07JYQEbeTx4xDJF3dUSuVSiB0hNJuZVLgLuBqSdvWsOyWUjkWjwD0AsZWGbpZ2Wcu4F/AYmRF2dvkgLLnXOVhNlBEzAQsA+wIjA/8A9hbgzGINnJg5UvAFWSy03BPbCrns5uSVTKTkDM/3ifbd9wn6fZfCsxa63Bw01pGRFxDZq3sSJ5cbEaeuLpsbBgovUOOJ++q3Uf2NL1ZA6em9ybLllYls4QmANaTdF49KzazupRy/X3JfkR7SzqsPP6j0vPKSexJkr5wwG3wVQLJZ5IToY8hp+7OAKxHXkR/DuxFDtb7vqPnd+6qW1/JdN0T2AHYR9KBNS/Jfkbl+6An8AIZ1HyOPPbMRmbVnw8cIenp8pw+wHyS7uwomNfdVd6zP5El+hsBF1dL+jtowVIdXDYamd3cV9IidfwbWkVTYPNo8iZTD/KzeKCkx5v2XwiYjEzGeL4dP39mHWmcH0YOodySPK9p9K79lBzOdXLZN8jhiI2AZ+O4dRCwAbCQyvDKTlp
7kD9vti7rnhDYVNK5ZfswnQJvw0+PuhdgVrEFmbVyPnAxGWjbstYVdQMR0aPcVX5J0hJkr7EJyL5Vh0fEMpGTRr+T9CRwCLAGedfszdoWblaDcoJDREwZESPWvZ4aPU8OUvgK2C+y+TrlxDXKhSCSniXLex3YHELlRH4ssv/d8eTQvJclXSdpDbLH4KjALmQ/2J88vxOX23Ia36vNJH2lbP5/CLB/ROzV5t/LLa3yOd4L6E3eVF0AWJ7MzjyBbMlwVGkhgqS+ku5svEQnL7l2lfdsfrIP6TPNAc3G7xGxTkSM2hSAm5IMGh/WictudUeQQZWPgNfJwUyPRsTZjc8dgKS7JV2gMkHdgU2zH24SfFe5cTIxeYNxJmAbcnDniRHxcETMq/R94+d4OX5NQ5aCHwK83JnrL+t5kgxubg3cCZwdEY9GxGKSBrT7OVdX4cxNaynlILcQeafncUlP1LykbqO5bCYidgT2I/u7nQ5cCjxcuYs2pqRP61ir2dBqzqIYmoBbRMxOBvZWlPTwsF5jV1HahaxMZhHODTxIlvneU7aPAAxwOd7Qi+wtfQg5ifewkr0WGlj+vzA5BGc/SfvUt9LWU8m4Wgh4lvwsVqcZj0lWKCwCbCj3Fmw51fOSyKGSSwLrKgcaVjMN1yN7HR4j6a+1LbjFRMS2wJHAGJK+qDzeyIKaELiVTBr4Z+X9DGB+SffVsOyWU8pSHweOVBmSFxHzAyuSlUyjklngB9W2SLMuoGRebgQso8r8hhK43JhsrQPZNmYTVdqfRcQlZDD09yoDcOsS2V9+DTIwOwvZj3MFuZ1Ny3PmprWUcufkLklnObA5bFVT/8vXh5IZnFeSd9fOAraJiBnK9k/rWanZr9LIVtk/IuYfyjutBwEfkn1/2kIJqhERU0TE7KW86G1Jx5B9Nf9JHi/uiohzI2IiSf0c2Bx6kdN2/0tmp/0+IsaW9L2y9+AIZbfngTeAaUpZlBUlsDkF2ZPxTeD6iLg2InaKiMXJgQDrkdPRz4mIZWtcrnWgcl7yD3LK7vjA99XPegnUnU1m0vyutB2w1BiIdVBEjNd4sPJzbzpgROCj6k2/cq7twGYh6R3yJtJjlcfuJxMA1iZL//eOiE8jYtF6VmnW2sr15UhkT9oXy2N9AEpVyt/J9jsvkS3Slq48d2yyUmiHugObAJI+V5bQL01W1nzowGbX4BNls26sWrYXEb0jGzWP23hM0heSNgLmIIM5hwOXlMw1sy6nUuq7LlnGODMMDN4NSqUcfTlyUM7f2ynAXwlSXgGcA6weOWgMSU9J2p1stn4mmV31v8jJ1Db0XiIb1vck24XsGBHTlYy1fmWfRsDiS5c/dqgvOQjvb+QQpnGB3YAbgIfJ/tLfA2OQfbytxUTEVOQwh6XJUuAVSwnggHKTpRGoe478fhhvEC/VVsrPrKfJYZxbALtExJyN4G/JCt+KHNx5YuU5ba+0Vmn8zF8qIh4gpyWPXh4bsRyHv5J0I7A9A6el+9rZrAPlWP0SeSP8/8pjfcu3W5+y2wtkxeDikk6vPPdj8jh2c+eu+udJepM8x9iw7rXY4HFZulk3VinbW44sB/gNOeHxAbK/29tN+69J9neb071FrKsqWT/rAscCl0tabwie9yzZG2hDSX2H3ypbRwycpNuHDDIcDXxJTvA+HXiwcSc9IkYFViDLjv4i6cV6Vt09lPdzXjI4tyxwDxlcfo7sQb1z2T63pP81t12wH4uISYB+ZC/C2YCpyLL0scig2Z2DfrbVJXIC+spkGfC4ZKuGPRpB/sgp6scCn0lasraFtqiIOBlYH3iLDOp/Rd6kGwnYQtLF4Wm/RMRo1fL98tjeZPBiRPJmyEqSPi/bmgfoTdx83mxmA0XE+GQWdC/g78BNkj4r20Ygb5KvDywr6cNKCw33bLdhwsFNs26qErCYiyzbex+4hWzyvASZqXkccEhHF8s+EbauLiLWAU4FzgZ2kfRJRydQlZOrnYHdgYWVjcW
7vcoNkBHJQW7jAJ8AQZZLj0pmF54PPC3py/K8scr76WDbMBAR45DBzZ2AmcvDL5PH7AuUU6F9TG5SSvk/Hoz9ZpX0VGesyYZOqSz5MznUZUnyHOViMug0BXlsWkvSc9HUQ7zdRMSc5DF6REn3lsf+RA7CmJ+cNP84cLykG+paZyspx9jzyJ9nl1UC5yOSn7s1ySFWH5GVG6eW7QGMUA1ymtlPVc6lVyX7XQ8gv+duI9uKrEz23PyvpOXrW6l1Zw5umnVzEXEX8B2ws6RHImIpcljK4+RF9H+BAyRdVd8qzYadiOhFlqKOChxAZi3/rXGxMojnTEBmyx1DDg5oi4BdJbh5IrA4sJOky0pG4dhkM/WtyVKjU8kpmM+3y/vTmcpF9JRkE/tNyRtRu5PH6xf9nv/opt28ZDBibnLC9hnAWcpBNEGe3w5wQLhrqN50ihxktiqZ3fMbMoj3L0m7le19gO/aKcun8TmOiN8DOwLLkD/jPgWeIH9m3V32nRD4AuirgYPJ2j4rqgSEHwEukbR6qdQYpZHJWc4BGgP05iUzYHdsZHv7PTT7sabjdi9g1EY7p4iYDDiYPJ8ZwMB2Dk8CS0l6p91vUtnw4eCmWTdUuXu2MDkoaA/gwnKx9yjwanlsF/JE7jPyBHlpSV/XtW6zofFL2YMRcQ55grUlcGpHFygRcSp5wTh3u5WdRcS45IXcbWSpeb/Kth5kJtWp5MX03eTNkFtrWGpbKJlEswB/JQN4zwGHATdL+l+da6tTJRA/JvAoMAp5c64vsBT5Gf6npMvrW6UNrebgUWTv73WAP5E3Ys8iM/A/KNvbImu8KYDwKhm4vBR4nXxvFiR73B0J/EPSZw7EdaxkB/cpVQeHkkGXs8ibR9+VfWYlzxfWAiYlpzpvoRw6ZGb86OfxyMAqwObkjca+wAmSzi37TQ+sRLY6ehu4R9J7Dmza8OLgplk3FhFrk6XnS0u6NyJWJksElihljiMAz5DBzoeUQ0PMupyImIjsT/gC+Xl+gMzKeCsixiA/9xMCm0t6uOmCcUSykflLkq6u519Qj0bJHZnR8o6kJcpxYUD1xDMiriJPWicDZgQ2lvTvOtbcLiIHg/yevAn1f2RAb6V2C743VG7anQEsBGwt6caI+C1Z8vYhMAk52Xj/dmkt0d1Ug5bl+LQEGeRcnqxCOULSgTUusVNVPve7k70hV5N0R2X7Egw8Rmwh6ayaltpllCyzM8hJ6I+TFRu3SXq9ss+i5M2lTYB1JJ1fw1LNWlIluHkKmWn/DDkh/bfkOeLywHUOYFpnc3DTrBsrpaVbSTq4fH0NeWdtHUnvR8SkwOVk6eOtpdyvLbIhrHuJiI3JQP7XwMjkQIXnySyXy4HpyaEtDwDrSnqp6fl9gH7t9tmvXDifR/YdW07S7WVbH5WhShFxPgNL0y8BJgL+KOmFelbePiJiPDKLaGFJq9S9njpFxDTArcDxZHbIFxFxCTAdORl6PbKk/xsyyLmZpG/rWq8NvaYg5+jkILO1yfYZG0g6u8bldapKMG42YCFJXzYdn8cmp6ZPDsyhMgDOfl5EzAccRN5EuhY4gRyg92HZPiqwgKSb6lulWWupBDbnIYdw7QwcWc4lbwV6kkM5Xy3Hpi+qFUFmw1OPX97FzLoqSV9WApujkYGf0SS9X3aZhixnGrtxd63dgjvWdZWSaSJiBnKK7ohkmd6KwP7k5NgewD/L458D8wFHlVLsH15DUt92/OxXShd3I++63xARe0fEiJUL59+QwaPxJb0BHEqW681Qx5q7goiYsGTANr6OoXiNxnO+JNsBrD+MltfyGv/2iOjZ+B4tpidvXPy3BDZ/Q/bJ+6eke8gbGI8C9wNjOrDZGiKi15B+D1QCmz0kfV6CmeuRN2fbJrAJUHpnfkQeh0cpj/WN1Fs5VOsqsk/yaPWttGuR9ICkRYF1yVYgFwP7RMR8ETFKOYe+CYbuGG7WHVXOldcmMzZvLIHNPwKLki0y3iz7bAs
cUhIIzIY7BzfNupHKBeHUETFPOUGbGKA0TX8CmCsijomIrclmz0i6sLZFmw2myud71IiYpHKCdTUwb0SMLOlBSddLOlLSamQfzYmAHYDVyc/8ksC+4GB+xZvAnmRQaGfguYg4JCIOIyelzwwcUvb9kgwwTVjHQltdREwH3AVsHBGTw4+CyEPjJLL/Zjt9ViMippf0fckQ6VUef5b8OXZ3+Xpb4DEyewTyPeoFXEZ+v1uNImL5iBhTUv9y8dvrl5/1Y5Ug51jAZmTmeDu6BRgJ2DdyaBBK35Wfjd+UX6PXuMYuSdJ5wBzkjbsNyDY2O5d2N419XOpo9mOfASNJeqZ8fSRZKfUf5QC00cihf+OQ2Zxmw52Dm2bdRGnOrNJ/7CrgQXJAyBURsVsprzkSuIhs/nwU+cNmo/L8Ib7oMOtMlYuL1YETImLViNiLLMW7FvimmvFV9u0n6dsS8LxROXF3G2CjiFits/8Ndatmn0RE74gYOSLGLxfJ15HZgQeTfUvXJYNq75G9Sl8u5aGzku0t/tP5/4IuQcAnwNHAyRHx51KaNWQvksfzecl+g3dK+mYYr7OV7UQG2I+PiJFK5hqSXpO0Vsna7EX2YOxRMoohqxEGAD3b7P1qOeWc4zTgo4jYEX7IQKwenwfndRrHrH3Lr4mH8VK7BEnXkO0YNiV//i1cCb4tTGZRPe1es0OnZAfvA8xOZn/vQfaZNrOOvQpMGBGzRsQ2wFTk+eMXZfsM5bHX5WG11kncc9Osm4mIh4BRgWOBMcmSvWnIQSv7SromImYs296pNlA3a2WVPj8LA+eSw0P6A9cD20h6q+zX4aTYiOhV7iZPQQb+7wQ2aaeG55X3cDlgY+A35ATLh4DDJL1Z9puUnI4+qqQXK8/fkMzwvFHSFp3+D+hCImIV4ABgCuAcsmfeE0Nykh8R95BZsitJ+nK4LLTFlL6aZ5IXTsuQgfS9JB1RtgcZ0Pw+InYh205sS05O3xpYGpha0ns1LN+KUoa4FHkOshY53ftvkq4q23uSMfxfzEiOiJnIwS/bkb1W2/LipbS62IZ8H8YhM5l7kOd4nwBLSnouPIn4V4uIKSS97vfSLFXOoceW9HFETALcA7xPBjJPA/aQ9HXJLt+O7IU9haSPwjMdrBM4uGnWjZSg5YVkEPPy8thEZCnXWmQw6AbgX5IerG2hZkOoo4BlRDxH9uD7nDypuhR4qJEdVPYZgfzcv1XJGpqULPG7U9JmnfRPqF3jIi0i5iKDu++T78PE5ETiD8kS6IM6upiLiAXJu/LfAUu7n+Evi4iRyezXXciex8eRg25ebLzHzZ/txtcRsQ5wOrC4KtORu7uIuBkYg8xQm4isLliaDMJvJenmyr5jAEcAawAjku0VDpF0bGev2zpWzkGWIDML/0hm2W8r6dWyvVf1mD2I17iODOb9SdInw3nJtarcgOoBzElmZT4DPEkes4McLLQUsBLZJuR+4N+SHnYAwcyGpWqAv7TaOYC8zny5nE+eQE5Jv5RsG/IFsDl57PqXpIN9k8A6i4ObZt1MRNwH7CnplshG89+Vx+ck76AtSQZ71pZ0QY1LNRtsEXEqGRA6uAQsRWZ3vUwOWViDHIhzKnC1pOfK8xYA9iYzvx4oj81BZjavoDIVtZ1ExF1kgHJnSY9ExFLANWRm1MxkBtyBkq5set5IwP8Br0h6rVMX3cWVC4J9yLL/J8i2ILdI+t8g9h+R/DxfD2z5S8Gf7qJk6N0D7C3pmPLYDmRAZ0Xy83cD+Z68VrZPQQboJwCelfR8DUu3Jk0ZtiMBawJbkD3YAI4Bdqqco/woyFkJ8i8LXAksJ+nazv1XdL5KdtSB5ICskcqmp4FTgCsqbRiIiNElfd75K+1ahja4EhGjAF+3a7awtbfy+f8D8J6kB8rNx8nIG9yvlH3mJI/v6wHjl6e+QU5QP7Ls02FFldmw5uCmWRdXucs/Hjkl83DgNklHl+19VKYel6+XJy+w12uXMkfr2kp
g6AbyIu83yuFYjW2NC8GFgH8B8wN3kJmc75AX0wtJmrDpNUdtp89/JVCwMHAW2U/swnLseJQsAd6DzDBcj2wU/wR5AuteScNIyX79J7AQ2Rv5BOABSZ+W7Y3/p/3IjPvfSXqhrvV2tsgBePeSAwk2jIjFyGqEtcner38mMzonJwPEOzhLrfVFxLlkr97nyOza2YHfAd+SN1lOLvsFeW0yoPL102TW4vrVc5nuJCLmI2/efVy+npZ8r04iA7sCtgSWJ78/jgLukfR2PStuD6Wv79HAma52snYUETOTiQSTksM7NyGvH89r2m8UsiXanMDHZLXU22Wbs8mt03igkFkXVu5EDyhlthcCDwPLAUdExJYAkvpG6lO+vhJYVdKXMQRN/c3qUrJUVgVWUw4SWSIiLo+IqTVw0MjdkhYANgSmBc4mLwqXJAfj/DA0qwSQ2iawCT8axjQ5MDbwWjl2rExmax4l6VkycPQy2YPzHgc2h0wJxjQ/1qfxZ0n3AIuQN5hmBy4AjoqIycp2lT5We5CZbS82v1439z5wK7B+RJxDXlQ9ATyqHJRyGBl8P5H8rH4YEZvUtFb7GaWsmohYnRxieJCkNSTtxMAsn6eAEyPi/oj4vVL1InhHYErgn904sLkEcB9wYET8tvycmpc8Bh8r6SZJN0takSxFHxk4H/hnRCxeWl9Yk8b5bURsGhGXlB6Ag/vcxnF8F+AvZOsbs3b0HFn1dy/Zp/0jYIyIGLe6k6SvJL0n6QZJD1ZvvDiwaZ3JmZtm3UDpRzU3GdDpBSxG9mR6hCz7ur3sNwJ5/dwWJY7WPUXEPmS53qfkkJZ/Nkoby/ae5MnY+GRfzZs6f5WtKXKC8VaSDi5fX0MObFlH0vvlRsnlwO7AraWk1HfdB1Mlk34+YHVgLjJA+QCZKftlZd8xgN3IQOeUjeBNREwJrAac3o5tEwAiYj0yuDsy2aP0H8BzlRLmsYFFyfduWfI9nkOekN5yIuIMsqxxEUmvVMvPI6IR4J+x7L6jpMPLtpGBm8j+wPt012NQqUzYjrwJ9x1wCJmlvL2keco+1Z53I5BZ3XuSP+OmbZSH2k9FxAvA3WS7pg7bgAzieRMCz5PVUPt318+f2eCI7AF+NjkYrnGeeCpZefJZ2ac32TpmZHLopL9nrNM5uGnWxZUT45vIScenlMdmJi+s1wGmIhs876gyCdmsKyv9CJcmS1UXBt4FDm4uk2l6jvv9NImI0cjy/ckk/V957PfkZO+dJV1Y5/q6mhg4sGkGMvtwZDLrcGJyQM7jwMmSLmt63uiSPm8K+vzikJXuqFKW34f8vn4HmBp4jeyTew3wZiXQMyWZFdhH0oG1LNp+VkTsRfY9HldlGFC5ATWg/F9vQg7duhA4TtKnlc/BgsDTjbYN3VUJCswBbE9WKbxL3nTaALhBHQwfixzUtIykU+tYcyurfH7GJis4TpJ07hA+90wyULOIpHeG43LNWlIHx5tZgZfI9hi7kseo08lM8ifJn9W3kjdyd6pl0db2HNw06+IiYkzyou8CSdc2/TD6HZkNsBwwHtmz6pzaFmv2K1WzCEtmxapkltsswINkhs/9ZXvbBzQrF2pTk+XoPcngUKMX0u7AXsDJZJbKOsDEkiava81dXUTcQvae2l3SrRGxDNmr6n9An/LnEyQ9XOMyW1pkD+ldyKy+UckLqSXJfrqNfoMfVPZv++/1ukXE4uSwsZebHp+HzJy7BthV0kvl8caxaVPyGL6+pLfb+f+ylKQvR/aK/gPwKNmi4l5VhgY1v0fOrh+o8rmahAwO/wk4UdJ5g/s+lcz7+4ANJJ09fFds1poqlSgLAI81KiPKzalpyQqqDcmbj/eQwc05yRvmX7Tzsdzq4+CmWRcWEZuRvce+AI4A9iWnyvaoZACNRJ4sbwRs4fIl68qig4mnETEb2cNteWAccsL0rpLeq2GJLaOSSfhbsnx/ZuAbckDH5WTZr8ihNouTJY6PArtJuqVdsweHRuW
CegHgYjIYd6Fy2NVdwJfAweQwofnIXoO3kxn13w3iZdta0426cYFlgJ2Bacgsv5PIrD73w6tZOcY8AFxGtrj4trJtZLK0dzMywHk6Gax7PyJmJLM655A0U+evvH4R0VvSd+VcbUZJj0UO1lqB/LyPTb5nZwLPqpv2Hh3WImIX8ngLcDP5ufxgcAIu5Zj9LbCi2qw/t1lVuUnwOHlueLxybkNjWx/gt8BO5LC/h4HDJV3o80eri4ObZl1Y5OTjncjm8z2AbSRdVLb1BKiUMzVKH32H37qMSoBuMrK/3hJkWcyFZGDjf5V9/0i2Y1gPWLO5/LddRcRDZPbbscCYwMpkgOgFYF9J15Qgw5jAO5Jer2mpXV5E/IXshbeypAdK1uYVwJKSbouIaYC7yAEVd0j6S32r7VoiIsiBWBsAm5NBnx0lHVPnuixFxN+BbyUdHjntexIyiNmvbN+J/N7oSZYwvk+WYk8CrC7p3x3dvOquyjnaiJK+Kl//mzwuL16CcD2B6cjP+ibk+3UUmfn9ms/jfl5pWfEnYCXgj8CdwA6SHinbOwxyRsTGwClkOfqdnbdis9ZTWjvsTQ5CHB34D9k+5JGm/SYmW4282+mLNKtwcNOsi4scELIyOUBlHjJrbWdJT5ftvYDvXRpgXVlE3Eb2v3oRGImcoHs9mbn8oMrgldJHckFJN9S01JZSgpYXkkHMy8tjE5FZVGuRgYUbgH9JerC2hXYTkf2ON5P0t/L1zUA/srzx/YiYgsya3YacAP6NbzgNmciBKrOSQ69OlgeG1aqjIFFEPE4eo/8BXC/pv+XxycgJ6IuSVSavAudKurgz19wKIvsbb09mFd5PtlXZnHw/vq7sNyJ5brcTOS39ZeBPvgk1eCJibjLreyPy5sgJwF6SPupg3yADyH2ArRuBebN2V7LztyKDnF+Sg/6Ob/cKKWs9Dm6adRMlYLEpGbSYkCw53VfdvBG/dV+VUt/FyROpLciAJuTFyr5kKfWZ5BCcpxpZMNXnd+6qW09E3EdOir2lUQJZHp+TPFldkgxyri3pghqX2uVExAiS+pXS22klPRkRI5Wg5VjARcBXklYs+y8EnEVmEF1R38q7vogYsVr+bK0jcqjWkeSx5T5yqu5NjUz7iBiDLPv9vtJCp62C/OVY8C+yR10/4BUyW/DT5sqbsv/oZBbiEpLWqmHJXUpTW4uRyZujq5M39b4GDpR0VAfPGwfo63J0s4FtMypfrwhsCSxEGZIInOMSdGsVDm6adSPlrvOswHZkNufIZHnupbUuzGwINV2YrEY2Ll9b0quVJuejk5kvO5LTZc8jBwe09WTTyvszHjAa2e/uNklHl+19qn3bImJ5suR/PV/QDZ4SfOjTyLCKiPPJiejrSvq4st8J5LF4N/IzuhnZc3MiB96tO6qWlpe2DEcDUwGXAOcCd/mmaypB3jPI/pp9yZt3e0l6qmwfoVLWP6qkLyvH97Yp4f8llfekB9kDcEOywuM74DQNHDI4AZl5tjawLNmb+1/1rNqs9VRaQY0v6f3yWAC9KseiPuRxfWPgDeAZYBXfbLRW4OCmWTdUyvb+QE5B3tV9g6yrqF6klN+XJzNblpC0QGW/avBzGnJwwMrAeNXgUrupnJhOSmYIzkn20hTZk/f4sl8AvRtBzsrzfME8GCJiEXKy/I3Am8C9ZODyXEnfVrKOFyJ7nU5K9oh8g+wTeakb7lt38nOZ8qXf5r5ktuapZGuGB9opU7NZ5RhxGnkjWsBiZE/Ss8jMwk/KvtMCp5GBOk/vblI5X9gH2BYYEfiE7DU9Gtn3eItGCW05Z/gDcKZymJOrPMyK0u7serJl0TmS3iiP9yQH1vYrvTifIHvgXyPpMH8fWStwcNOsG6uUR/oHjnU55e7wo0Bjiu7ewDHVrJ+mIOcUkl53gA4i4jpgbuBsoBd50Twb8Aiwk6Tby34jAHKQbciUjLTjyAtnyJLSJSV9XLKHaARuImJMYEXge3II1iM/fUW
zrqkSWOoFTEYed/qQx5ovKqXoo5L9DDcEPgMWlfR4PatuHeVG00iSvi5VChsCC5KZ3oeQgbltgb8Dk6syRM9+9PmbkRxUdRz5OXuT7O26LNlv8yNgw8bPvsrzfX5sbasEKWmqOPkNeQNqFOAx4HzgCklfVPaZghy8tY2k58tj/l6y2jm4aWZmtStN/zcG/inpzfJYT3J67PLA1mQG4lFk+fnLlV5tPqGqiIjJgZuAwySdUh6bmew3tg4DS0R3bLzXNuRKb7YrgfmBj8nBTcdIerFsH4HsKfiTElJ/Zq27iYg9yMDcVOWhL8nsn3PIUvTPyn5zkD1n161jnXVruiH3kxtx5biyHtkbck6yXF3AIZL29c27jkXE0WQv7uVUBmqWx0cD1iR7wF4uaW0ff81SRNwD9AB2JbPpv61s+xt5Y6UPeU55oaQby7YVyCz89SVd28nLNhskBzfNzKx2EXEE8FdgU0mnNV0AjgLMSJ5krQk8S16o3Cjp7ZqW3LJKpuCxwAWSrm16L38HrAssB4xHnpieU9tiu6CSaRUlaHkDOfV5JGB24HUyW/aUSjBnOnJq9AWSLqtp2WbDXKWdxdJkps8V5IC3EcgMzq2A/sDOHZVTt2OgrlKO/mfgj8AUwFXA3cCbkr4p+81IZh2OXx4/pvr8elbfOprfh4g4BFhH0kTl617kzaXGz769gH3IwW+v1LBks5YSEb3JG97bkBVSZ5Dnji83gpyRvdv3BlYjb+K+DnxIHrveljRnDUs3GyQHN83MrHalBH0VMgA0ICKOBO6VdHFlnzHIcr1dyEmN1wCnA7fKg3AAiIjNgBOBL4AjyD53QfZJamS6jkQGNzci+5D5Qu9XiDK1OyK2ArYg+2s+AhwP3E4GePYCJlZp0G/WnUTEveQF79aS3qgE8GYkS6uXAVaSdEU795qtBIMXAG4lszLfJAMLLwInkNmuL3fUjzTabKJ8R8p5wOeVoGXjs7Y5eczdADi/8vOud+mruSE5BGUpSXfXtHyzllJuAkxIZov/jWyfcxhZifJu5ftobnJK+hzAOMCDwL6Snm7nY7q1Hgc3zcyspUTEXOSgls+AO4DjJD1atvUkMw5XIIOcUwCTuQ9ZioiFgZ2AeclSo20kXVS29QTQwEnGo0v63BfMw07kNN6dyBYAo5JZayPhklLrpiJiIjJQ96CkDarHk9J/dnbgZjLTfu0al9oyIuI2cpL3nsDzwAxkT83lgbuAY4C7Jb1b2yJbUDm+nk8GgW+X9GFl29Tk5zDIEttbJX1Qto1OVn7sAMzk99XaXUTMArwm6avydS9gZvL7ZD3gKXJQ563AJ5WbCROTLUf6S/q6jrWb/RwHN83MrDYRsT3wgqRrmh7/P3L69KLkidRlwAmS3inbewPTkhcq/3aAbqAyuGNlMmNwHjITaOdGH7Lmcj0bfJUsoZmBP5Hv70vkQKFbK/1i5wTWJ/vEPivp4Orza1m82XASEU+TF8ALla+bh2pdAUwALK0yAbzdVI4d45AVB/c2jguVfZYhW1jMQPZFPp0M4vmYwQ/H1evJG0eXkhPkH60EaH5LTpqfgjxnuJ1sY7MqeTw+Q9IOvslk7SwiJiSrS+4AttSPh3SOSvYR35U8/76KzL5/ovF9ZtbKHNw0M7NalJOo/5C92S4G9pL0QtM+K5NBzlmA18gLlzOaS2AcNPqpklG1Kfn+TUhmA+1bPZG1wdcovYqIeci+gjOQE40nKLvcRTbY/7ek78pzfriIdgDeuptKwG4bctjbCcDfG/1myz7jkN8XkwLztfP3QOnXuwF58+kaSSc2Hq9kRvUmj9lHA0dK2r6m5bak0sJmC2APYADZhuUS8iZp35JZti35Ho5ZnvY1cAGwWfm8+lhsbStyQvpFZPuLzUtf+8klPVu29yBLz/9MBjknBk4iv9de8Y0Ba2UObpqZWS3Khd70wFLkMKHxyT6Rh3ZwJ3lTYG0ykPQAcLakqzp7zV1NeY9nBbYjL6hHBta
UdGmtC+vCIuJxsqfpQcC9ZBnkDmRT/q+ArTw4yNpBJbg5CZndsyIZ5D+LvHgehzxu7wfsKOnEds6ai4i1yenxAE8Da0n6b2V7Ncg5AfBZ6efrYFyTEsTckzw3eIYMBt9YyZ4fjQzOfEEOQXm+BD/b9vNnVlXpR3sS2c/+GODyRm/wUuUzGdmffXOgJ7C9pDNrWrLZL3Jw08zMalUyMWYjp3hvBHxEDmA5r3oREhHTkCdY6wJPSfpjDcvtkiJiBOAP5Pu6q6Q7a15SlxQRi5Pljn+RdH7TtunIidGjAUtKeq6GJZrVIiLGJbPp1gTGIifrBllCfIOklWtcXksovR8XBzYm21pcQ2a8Pijpi8p+DmYOplKK/g/y59tN5LTn+yR9VOvCzFpQRIzc6JUZEX1KwH8T8gb4OOTNqQvIzPJGBcqIZD/O/YAzfXPcWpmDm2Zm1hLKFNT5yQDm8sA9wG7Nk00j4g/AR5IedxbGkImIkSR94zL+oRMRKwLnAqtLuqbRW5A8n/o+IpYmAxabSjqttoWaDQeV1gzTAksAYwPfAldLer7sMzeZMTc+ebF8NnC/pI98vP5hsNvEZCb9DuTNkNPJbNdnGwEF+3nNP8MiYnXgALL9wVnk5+5xDz0xSxGxPnAGsJOkw5q2jUgONduYHHZ2LXC+pHsr+4zs7ydrdQ5umplZS4mISYHFyL5Zc5EXKXtLer3WhVlbapr+PDvZFuEUSduUx3oCA0p57lRk5sPpkvaqbdFmw1hTyfQjwG+A78l+hp8AVwL7dzQwyDdTfqpULDSqETYG3iFLq6+V9HKda+sKStn599VgS+kduD2wE1kBcj5wjDwd3axRebIV2QrqTeCvkq5t2mc6YJ+yz2vAv4GLJL3UqYs1G0o9fnkXMzOz4aP0hPwRSW+RZTHrkT21Fgcei4jdS/9Ns05RMs0GRMTUEbE38DJwIbBVROwMIKk6eX5GYHTg1fL8n3y+zbqiSmBzV2BycmDLqOTF8gNkOfr9EbHloJ7briJi3IhYLSKWjohFACT1lfQMmS21FPBf4Egy2GlNyk0kIuK3EXEYOen5hYg4MyIWj4ixJX0laX8y8H4fsDOZWWzW9iTdTA7j+gs5DPHqiLghIqas7POipLWB1cke4tsCF0fEQjUs2WyIOXPTzMw6XSMbrgR/piQHUbwJvAS82hgoVIKZvwHWIS/69pD0j1oWbW2jZAXNCDxWynDvIYdZLUDeGD4aWAV4luxD9SYwO9kzdgxJ09eycLPhoClrcw/ymL1do09kRIxFllmvQWbbP04OhruulgW3gEoJ/6pkr+NZgP7kjY+HgJMbvY9Le4uxySDnnZJed9/NgRrtDCJiQuB+Mqj+BPl+zki2QDiLPD/4sPK8SSW95XYI1u46aOMwB7AJ0LgZdTjZBqpf0/O2JoOhC0r6vJOWazbUHNw0M7NOVwlu7kzeGZ4QEPAhcB5wNTkUoNHQfHxgbuDWMt3RZY423ETEAsApwOeUclsyk+HKcpE9LZmptjrZaL/hHmAXSfc2ghudvHSzYa4yFX01YFFgQkkrltLq/o3AUfm+WA3YEPhO0iz1rbo+lZ9vI5E3Ph4D/gW8QmZ+zwr8jxxAdrykV6vPq2nZLavy+bsImIkMrN8aEWOSPTbXJs8jngBWlPRefas1a02VmwQTALuTx+pnyRsEEwCfkecvJzc9rzFV3TcJrOU5uGlmZp2qcoI1E3nRdxFwGvAk8ChZ8vgKOWThWkn/bXq+A5s2XJWT/zWATcmL6deANSQ91LTfbORk6JmAt8mA/IeYdTMlcPk08AHwPrB0o5dh4+K3su/vgS8kPdqOF8SVYNzBZFXCepLuL8HgD8gbJlORmeCPkEHOoyV9WduiW1xETELePPo3sGs1w6wMI9wMOBjYRtJx9azSrHVVjkvXkMHMQyRdHBETk33u1wP+SGaW/1XS/TUu12yoOLhpZma1iIhLgfGAzSU9GxFzAg8CewDLAP9HTmy8Cbh
U0vu1LdbaUkTsS/Z9bQQdDiGDEJ9V9ulJlum+3yjTNetuIqI38FcyWDc/GfDfRdIlZXsAvZrLGttVySq8kryBt7ekzyLiDPLn2lxkSfVVwAxkWfWq7VzG/0vKsKDngOslbVZK+aMaOI+IZ8jP5Z/bLaBuNjgiYkbymHQQcGC1uqQMEzoAWLU8dBewBJmF74CRdQkeKGRmZp2uTJWemjx5akyGPYYcEnAEWS7zDLAccCwwbg3LNLuDnGS8HtkqYU/goYhYozIsaCYy+2r7epZoNnyVjJ/vJB1CHpsPLJtOjIhLI2IepX4R0avGpbaa94C+JbA5GZkNfgzQo2S6Pk1OSd/egc1f1J8soV0xIhaUNKBUgIwAPwTfXyL7cY5U4zrNWtm3wHdkO5H+kXpBDhMC1ifbO1wLvFgGnzmwaV2GT0DMzKwO3wHPA/8tvXx+D8xLDqWQpHcj4gHgYrLU9xmXo1tnk3Rb488R8SBwI1mqfj6wQURcS5ZxDSBLIt02wbqd6udZ0lvAnuWzvxWZ2fN/EXEecJSk/9W0zJYi6dOI2I7sYwfwe7Ik/TFJX5aAwidkQO5UcM/NqqYhVuNJ+iAi9gcuAU4uE9OvkPRxecpc5I2mm8v76+Ow2U99QN4k2DQirpX0ONC/0iN8JPJ85mrgbBjYSqquBZsNCWdumplZHd4hMzQvLV//FngDeLlk/4wGjAhMUw0wmdVF0tuSziaDmzsDkwBHAb8h+7x9Wy4QfEFtXV4p+yUiRo2IWSNi+fLnHgCS7pe0LrA18BQ50OXakkFnQAn0NnqRvk4eM/6vfD0HsDwwciNw4MDmjzQ+f38Bbo+IrSXdBexA9gs8AbgkIg6IiL2BM4GRgV3L8+OnL2nW3iR9RU5GnwQ4vBzXx6yUp88AjEa2GPm2PMeBTesy3HPTzMyGu0oj8x7AOJI+aNq+MpmRsQZwDbAwcBJwjqQ9nYVhraQEcCYme8b2K9kPZt1CZejb+GRbkKXJaq8BwPFkRs8LjYvfiBgd2A54VdLZ7ZiB2JRp2AMYVdLnle0TA+cAM5OBzonILKnfSnrd2VEDVT5/U5MtPy4DDpP0TNk+OtkaYVlgHLIU/TLgJEk3+700+3kRsQkZ5OwPXEeWovcFNidvHkwq6Rufe1tX4+CmmZkNd42L3YjYClgFOFfSaZXt05FlMJOQvTanIctnZi5BUZ9gmZl1gsrNqCuAuYFzgVvItiGbA28BxwEXAu9UJ6VXn9+5q65P5edbH7JP9GZk0O1b4GTgKkkfl4nzO5M/314nB+Vd52Bcx8rQwamAdcrQwR5kv9L+ZfukZIC4v6RXa1yqWZfQdBNmanKA5+pAHzJb+k7gCElXVkrVzboMBzfNzGy4qmRhzAXcSvYrPFDS2037jUqeaC1GXkhfJulhn2CZmXWOSqBuQeAG4K+STi/bbiKDdi+QF8S3k5mcdzRn47eTynt2NLAh2XblXWAsYBbgcfJ9vKvsP3alV2TbBYMHR0RMRJ4v3AlspR9PRR+htK/pAczYyOg0s8HTFOScGJgc+BR4Q9LXda7N7NdwcNPMzDpFRPyHLGvcXNKLTSdXP2SuREQfSX3rXKuZWTuLiKOAOYEtJT0VEX8iJ+guAzwJnEiWBQP8Q9Ie9ay0XpXA5hzAQ8B+ZAn11xExA9liZTtgemDj0rfXfkFE9AReBG6VtGlEBPx4uFVEbEv2el1e0tP1rNSstQxuW5DyPRXt1kLEujcPFDIzs+GuXOTNANwHvAIDL1JKkPP7iBgvImZ3YNPMrD4RMRLwDVly/lR5eH/gCuD+knV/KPAgWYJ9cnle2w1xqQQG1gb+B1zeyHyS9Dw56GYbMptzi4gYpY51diXlcyQyiL5SRCygogQ9G5/Rkcp+H9a3WrPWEBFLluSAwQpWlm8pBzatW3Fw08zMOsNn5IXIF829xSqZGOsCh0bEJJ29ODOzdlYNTEr6huyzeUzZNhs5ZOIuSZ+W3SYi+7TdLemN8rx2Lgf
7hHyPXgGIiF4lg6q/pNvIlivzkRmc9jMqQZfjgBGBwyJi5VLO3zh/WAzYCrhe0nulRN2sLZU2DtcDm0VEr1/xOlF+H3lYrc2sM/kHgZmZDTMR0TsiRuxg0xfA88C6ZXhQY//GidRowKTA2GTfHzMz6zyNY/GqETGGpKck3V22vUPenJq07DMxMBd5vG7rQS6VQMLjZLB3t4gYqQQ1B0TECGX718BXwIQ1LLNLknQzmRk8I3A6cGpEHFRaJpxGvp871bhEs1axJ/AccLOk/uVcfIiPNSU7embg4VJxZdalOLhpZmbD0jnASRExQ6N8DEDSV8AFwMzA7hExU0T0rmT6LAasBVwh6StnYZiZdZ4SiBsPOBs4qNxwavQ+/JgcLrRtRJwHXAT8jZyq27d6rG8XlX/z2hGxHPAEcDOwAxngnBWgDL4ZB5id7Dn9QB3r7aoknQfMCpwH/A7YEtgc+DewbuPz5/Jaa1cR0Zu8+fQl0BjUeSRD3wbjn+TNrrYdEmddlwcKmZnZMBERfYC9gU2BvsBhwAWS3q3ssxN5h/lTcmr6+2R547rA+5J+U/bz9Fgzs04UEaMCh5MTv3eUdFRl2/jAFuSU9H7A+ZIOLtva8ngdEROQWa37S9q7BDHPBpYCHgXuJvtG/hn4AzloaN/qAD0bfBExLjAO8JEk99k0KyJic+B44GjgRnL422bA2ZK+G4znR8naXKo8dzlJ1wzPNZsNDw5umpnZMFNK8GYCticHLDxG3gW+XdInpWR9MfLieWnybvN3wBnASZIej4hekvrX8g8wM2tzEXE08BdgN+DYxsVx49gcEaNL+rw8NliTebujiPg/crDSDpLurzy+BrA72V9zBHLQ0CmS9ivb2zIYbGbDRzn33pU89x4FeBn4k6TXG+2fBueYExFPA88A63i4p3VFDm6amdkwV5qRL0xeHC8MXA4cIumBsr03MDrZS+uNxkAKMzOrRyV4OS1wAlkOvIakO2peWstoZF1GxFTAysAuwOyS3i6TivtW9p2XHDT0RaOCoZ2DwWY2fEXEMWTbhp5kn/udJV1dtnV4U6WStfk3YH/gd5Ie68Rlmw0zDm6amdlwU8rIViQvACcATiJLZ153WZ6ZWX1+LoOwlFhfC0wNrF2Gu1gREY8Ac5ItVnaTdFJlW+/BKQU1MxuWIuJG8pj0ILAGMDfZL3lnSU+VfX5y3C/H++eBU4DdfQPGuioHN83MbLgq02SnADYhhwF8DBwMXCbp/TrXZmbWjirZOiMD/we8AvSV9HZln1mBC4HPgI0kPe+S6lQmCh9C9tcE2Ac4R9KrZXsPshK07d8rM+s8ETFWaQM1F7A8sBHZ2/44YG9Jn3bwnBPLvr+V9FZnrtdsWHJw08zMhpvqhXBEjESWOe4IrEBOl91T0o31rdDMrH1FxP5kf8j3gfeA14Fby59vItuKXFweW0fSxzUttSVFxKJk0GBG4BbgdOB6SZ/VujAz6/YGkYX5Q3uMMi19AXIQ3JrkjaojJB1S2X9UMuHgLuAi35CxrszBTTMz+9UqfcjGAJYhpzS+AzwM3Cnpocq+YwC/A44EjpR0TA1LNjNre2UoTh/yAngOYHJgNuB7cjDFbcAMwKTACpKuqmel9Wr0yiwT0mcm36OngKclvRcRfyEnzfcCziEzXm9zeaeZDS+VDPy5gZWA+YC3gBeAiyW9VPabAPg9sC0whaTJml5nLLI3sId5Wpfm4KaZmQ0zEXE2mZX5OvANMAvwOHANcIGk18p+PYGxJX1Qvnapo5lZjSKiD9ADGA1YBBgL+AMZzHta0ur1ra4+lZt3kwEXAb8l36cgAwnnAf8C+gLHABsCA4CFqlPUzcyGlcoAuPnIY9DEwHPkDapRyfPwE4ETJH1bnjM98L2klxvPr2n5ZsOFg5tmZvarVC78lgSuJieknyzpi4h4iuy32Qu4h8xoubKjnj9mZjZ8Vad1R0Rv8ibTu7/wnAmBbyV92jjed8ZaW01EXEcGeo8
D7iODv7uQWa/3kGX7r5cp6X+TtFZtizWzthARjwGfAAdIuq30ud8C+At5/r2vpEPrXKNZZ3Fw08zMhomIuB74CthR0msRsQpwPjlw4f+A/ci7yi8C+0l6pLbFmpm1maYeyH8lyxinIo/LRwH3VXtqlgz7Ac6qh4iYAvgPcKik45u2rQucADwGrCjpw8q2tg0Gm9nwFRELA9cC20k6rWnbJGRG52zAEj7ntnbQo+4FmJlZ11cu/MYB3myUngP/JEv47pZ0AHAZMAE5oOKDOtZpZtbGegBExN/J6d6jkMflMYErgGMjYoGIGBFA0vcObP7gC7KH9LuQgeKSIYWkc4CdgQWBP1af5MCmmQ1Hvcqvr+GH41KPclPlf2SPzTGBxetbolnncXDTzMyGhffIi+OrASJiBWB84ExyMAXAG8DRwDSS3ogI/wwyM+sEpRz9+4gYn2wdchrwR0l/Ax4k+0UuDFwP7B4RM5bMzbYXEZsBHwJLA4tHxEgApd9d4+fYTeQk4jlqWaSZtaNXgM+BNSNibKUBZM9fgI/LPhNHRNS1SLPO4gtLMzP71Uqz8kOA28tDY5IXy1+WC8Cxy2MLAp+W53iKrJlZJ6gcb7cEXiUn6X4aEZMDmwF/J4fBvQHsTk5JX7+GpbaiZ8mheJ8DawFrNTJaK+/rBOR11ee1rNDM2koJVr5J9rJfFtijTD2nknE/KzAeORBODnBad9er7gWYmVn3IKlf5cvnyGmNa5QJvPMCqwC7lBOsHg5umpl1nnIsHh14GXi+PPwv4BngEknvRMQBwB7A/8ghFW1P0l1laMcqwObAKRGxGnAo8BEZPPgrGdg8DH7c39TMbFgrxxdFxC5AAFsDK0XEMcBbwJTAesB7kk6qbaFmncgDhczMbJgrd4f/AhwPfEneTLtJ0gp1rsvMrB1ExP8Br0h6r+nxNYBxJB1XeiXfCxwBHFmy7Jcn+3GuLOmVzl53q4uIiYBNgU2ASchenO8DZ5ADmW6PiF6S+te4TDNrA42bKKXdyFJkZvnvgd5Af7Jdxn6SHvRxydqBg5tmZjbcRMQYwKpkGeSTkj7w9Fgzs+Gn9Mp8gcwo3Ak4rXpRW7kgngy4BzhV0n4ls3NDcjjO7yS9VcPyW165eTcbmSm1ODAhsKmkc8v2HmRilS+yzKzTRMTEwIjAnGT25tOSvqx3VWadx8FNMzMzM7NuovTRnB1YDVgTeAz4u6Sby/ZGcHMM4D4yy2dHYC5gI+A/ktZ1afXPi4jeZHBz2/L748COkm6rc11m1l58rDZLHihkZmZmZtYNRMSswLHAU2TAcm2yPPHGiLg4IqavDMP5DFiDHEpxGTlI6Blgq8bLdfLyuxRJ30m6lqxO2BwYAbglIm5sTFQ3MxveBiewWTLKzbo1Z26amZmZmXUDEXELMDawkqTXymNjkxmZWwHjkz02Dy3T0gOYCBiXHAL3X0lfuH3IkCtl/rsAY0lau+71mJkBRMRIkr6pex1mw5uDm2ZmZmZmXVxETA3cQfbQ3Lc8tg9wFtl/bXZgXTLQ+RGwj6Qza1lsN1Wyo3pJ+q7utZhZ+6q0H/k9sAewgaT/1b0us+HJ6clmZmZmZl3f28D/gKUjYooy+XwvYE5J/SQ9XL5ehezDeXpE3BURC9W35O5F0gAHNs1saDRKxyNi9ohY7te8VqVU/RjgS/KGllm35uCmmZmZmVkXJ+lb4Gzgt8CNwGnATcDdlX0+k3QjWaK+Adkn8s6I2L7TF2xmZj+QNKD88RJg54gYp7q9Evz82RhOaTdCRGwOTAMcVH4+mHVrDm6amZmZmXUDko4HJgH6AGMCEwB/jIgJmvZ7GziPDHDuA1wBAy+Kzcys80REz/L7BsCkwD8kfdS0vXdEjNAIgg4qyFnK0ccA9gdOBh4ezss3awnuuWlmZmZm1sU1eqyVP78JPAtMB0xMZgKdDdwn6Yum540gqV/1+WZm1vki4kPgMmB3SR9ExPTAssCmwMfAG8Clkv49iOc
3em0eSbYgWagxXM6su3Nw08zMzMysm4iICYGlgJuBT4GdgO2Br4HTgUuBJyT1r2uNZmaWIqKHpAERcQiwJrCUpP+WbY8AMwH/BfoBswIDgHOBAyS938HrzQQ8QR73j/NNK2sXDm6amZmZmXVjETEdsC+wBvAUcCJwi6QXal2YmZkREWMBrwLfAWtKujUi/gFsTvZIvrRk2M9HHsv/CGwu6dQOXusGsi3JnyR92kn/BLPaObhpZmZmZtYNRMQ0wBTAV8AnzcHLiPgDeWG8AHCSpC06f5VmZlYVEZMBfwGWB2YBLgb+RB6vj5fUNyJ6Svq+9Ea+G5gMmFXS502vczOwk6SrO/vfYVYnBzfNzMzMzLqoiOglqX9ELA4cS/bZ7Ac8Rk5LP0XSm5X9e5MX0Q9Luq9RElnH2s3MLEVEL2BeMsN+ZeAbYCNJd1b26VMCnccAKwFzNpemR8SIQD9J33fe6s3q5+CmmZmZmVkXFxEvAx8ChwEjA+uRgc63yGFCp0vqW98KzcysI00D4cYAFgfmBA5pLi0vk9MPJ3srLybpreo2BzWtXTm4aWZmZmbWhUXEHMAFwNaSbi2PjQhsBKwPTAI8Cpwq6aq61mlmZj9WDWw2PT6WpE+at0fE3ORAoYclrTuo55u1mx51L8DMzMzMzIZMRPQov/cBxiIn6H5cHhtR0reSjgdWIS+EpwIujIgV6lmxmZk1q2RsRvV3SZ80tle2TQ1sBkxATkMHiM5es1kr6lX3AszMzMzMbMhU+mQeSg6hEDkhF0nfRsQIwIDSb3PXiLgKWAu4soblmpkZA0vHI2JjYGzgYkmv/1z2ZQlw9gFuAMYHdpX0gcvQzQZyWbqZmZmZWRcVEdsC2wJTAx+Q2TwXNi54I6K3pO+anuMhQmZmnaxRQh4RUwEvA1+Rg9/OAW5v9Nf8mVL1hYCpJZ39c/uZtSMHN83MzMzMurAygGInYAvge+Basr/mPWV7AD0l9a9vlWZmBhARBwBbk5mYS5GZ9+cAFwIPSupX9vvhRlQHvTcd2DSrcHDTzMzMzKwLGVTmZUTMDuxNTtp9B7iEnJL+cicv0czMOhAR4wOXApNImiYixgIOAzYAXgFOA66Q9GzlOVMBYwAvSPq681dt1vo8UMjMzMzMrAuoDJoYEBE9I2LRiJgnIuaIiNEkPSFpJWA94FNgK+CGiPhDjcs2M7OBvgD+B9wKOThI0kbA/OXxA4FTImKjiBg3InqRWZ43A6PVtGazlufMTTMzMzOzLqAyiGIVss/mQmXTu8B/yF6bV1f23wnYHJhX0kedvmAzM+tQRIxYhr81blo1pqavTQY4JwEuBp4G/gbcKmlN90w265iDm2ZmZmZmLa5xQRsRE5EXu88C15FljCuUX++T09NPlfRVed7Ikr72VF0zs/pFRB9JfX+uh2ZEjAJsB+wKjAx8DUwo6UsHN8065uCmmZmZmVkXERGnAQsAa0l6rPL4/wHHA9MBq0i6ISJ6eYiQmVm9hmT4T9MQob8CRwDbSDrOx3SzQXNw08zMzMysCyhT0a8AvpG0dHlsBKC/JEXEeMDdwCfAwo2Ju2Zm1vkaQc2IGBWYm+yD/CpwJ3DjzwUqI2IG4CRgCklTVV+vE5Zu1uV4oJCZmZmZWRcg6TNyUND0ETF2RPQABpSL5xEkfUAGNycAxm70cjMzs85XCUQeCFwA/B7YGDgdWKR5/3JMb5gKmJUcJkTJ2nRg02wQHNw0MzMzM2txlYvee4Gpga0lDSgDhnpJ6hcRvYGPgAB6+ELYzKwejWN2RKwGbEC2DZkTmIs8Rm8cESNFxBgRMWHJyvyhl6akG4AVJF1bvnY5utnPcFm6mZmZmVkXUvpubgjcRg6deKpkby4HHAncIWlDlzCamdUrIp4BHgZ2lPR+RPQBTgXmAW4FlgL6Ao8BR0l6sHkAnI/lZr+sV90LMDMzMzOzn6pMSJ8CmBiYDLga2B74kCxvfAJ4MiJEljG+DmzbeAnAF8RmZjW
IiFWA3sDVkt4HKJPSZwRGIY/juwOrAWsCE0XECpI+r76OA5tmv8zBTTMzMzOzFtPI3ImIyYGrgJnIQGVf4AAyQ/MaYHFgPmA04GDgGklfNGf+mJlZ5yk9j8ciW4U8Xnl8C3K40PLAtaUU/cKI2Bo4mixhP7qz12vW1bks3czMzMysRUXEdcD0wHFkls/vyAyf94C/A1eUTKCRJX1d30rNzKxZRPxO0p2Vr/cBxgF2kfR1RIwo6duIGBd4CTha0l4uRTcbMs7cNDMzMzNrQRExKTAlcKikE8tj1wAXAVuS03fvj4g9JN1W20LNzAyAiJhS0muNr6uBzfL1PhHRW9J35aG+5feJgfcpMRoHNs2GjKelm5mZmZm1ps/JIRPvwg9DJT6RdAuwFbAuWap+S0TsWN8yzcyseDAino6IuRsPRETPUqYOQCOw2cjOjIgRyPYiU5BtSH6Ytm5mg8ffMGZmZmZmLSYiNgM+BZYB5ikXxhERPQEkvUNmbm5IDqS4pDwvOnxBMzMbriJiTOAI4DvgoYg4LyLGlvR9CWL+qHK2kp05P7A1cL2k+0vQc0CnLt6si3PPTTMzMzOzFhMRCwM7AfMCPYGtJV1UtvUAaFz8RkQvSf3do83MrF7lBtOc5AT0dYFxgT0kHVLZ54eBbxGxKHAm8DWwmKR3PBDObMg5uGlmZmZm1oIiYlRgZbIEfR7gemBnSU+X7b4ANjNrEdUbTBExGvBnYEdgDuBF8vh9ZdneE+gNLAysDVwo6Xof182GjoObZmZmZmYtLCImAjYFNgMmBI4B9pX0aZ3rMjOzH2sEJyNiWzJ7c2Tge2BSYALgTmALSc829gd6SOpXvnYGvtlQcHDTzMzMzKzFlVLHWYHtyGzOkYE1JV1a68LMzAzIliGSBkTEvMC9wN7AyZI+iIi5yB7KW5JBzsOBvSR9Xd+KzboPBzfNzMzMzLqIMlX3D8BewK6S7qx5SWZmVhERpwN/BJZsZGiWx3sBywPnAyOUh9ds9FM2s6HX65d3MTMzMzOzVlBKF2+IiDskfeMSRjOzlvMOMBbwCvwQ1JSk/sC/I2J7stXIa8BzdS3SrDvpUfcCzMzMzMxsyEj6pvzuwKaZWWt5ABgF2CciRpPUv/ThbGRrCvgM2F7SE6XtiJn9Cs7cNDMzMzMzMzMbNm4HbiB7JBMRFwFPSOoXEeOQw4UmIjM8fZPKbBhwz00zMzMzMzMzs2EkIsYFTgeWBZ4C7iq/L1ke20fSAY3p6vWt1Kx7cHDTzMzMzMzMzOxXiIjJgJHJZMwXymOrAHsA0wMjktma50nauWx332SzYcDBTTMzMzMzMzOzIdDIuoyImYBtgM2A/sDXwG3A3ytBzrmBT4BvgHclKSJ6SBpQ0/LNuhUHN83MzMzMzMzMhkJEPABMDFxBZmbOCSwGjAHsLenA+lZn1h48UMjMzMzMzMzMbDA1si4jYkNgZmAdSVeWbaMAiwBbA/tGxKeSjqtvtWbdnzM3zczMzMzMzMx+RqM/ZrWcPCJOJLM0F5X0v4gYQVK/sm064CJgLOC3kj6sbfFm3VyPuhdgZmZmZmZmZtaKIiLKH0eIiJGa+mR+BEwJfA4gqV9E9Cz9OF8EjgImB8buzDWbtRsHN83MzMzMzMzMOlCZZr4rcEpEjFrZfDPZ7u+oiJi87P+9pO/L9l7Al8AEnbVes3bk4KaZmZmZmZmZ2SBERCN2sjywVWXTQ8D5wAbAPyJi0YgYvzxnNmA54G1Jd3Xics3ajgcKmZmZmZmZmZkNQilF3y8iPgAOiYgJgX0kfQasExH/BXYGlgGeiIivyanpfYBVASKil6T+9fwLzLo3DxQyMzMzMzMzMxuEynT0XsB2wIbAMZJOKNv7ADMC6wErAF8BTwBnS7q5nlWbtQ8HN83MzMzMzMzMBkMJZB4EbAvsIOnIpu29yYzNrypT1UMOvpgNNy5LNzMzMzMzMzP7GaXvZg9JfYHtIuK98nsAJwDfAT0lfVf+/AM
HNs2GLw8UMjMzMzMzMzMbhNIvc4Ck/qU0HeAM4AZge2Dxsr1fCXaaWSdyWbqZmZmZmZmZGQNLyCNidrJ/5sxkD82ngZMkfdm0/8nA+sBfJZ3Y2es1Mwc3zczMzMzMzMyqg4PmAy4BRgVeBcYEJgb6AccAJ0h6qzxndGA/YGFgb0nXuMemWedycNPMzMzMzMzMrIiIh4H3gP0l3R8RkwO/Bf4MrFK27SDpirL/lGTfzbmANST9p451m7Ur99w0MzMzMzMzMwMiYjZgMuA6SfcDSHpD0r+BHYG1gDeAyyLiiIgYWdJrkpYC/gMcFRGz1rV+s3bkaelmZmZmZmZmZul1IIAx4EdT0vtL+hC4KiJeAXYANgGeAM4sz90HWAz4tHOXbNbeXJZuZmZmZmZmZm2vTDrvCVwJzA2sIunuyraQNKB8PQ5wATALMKek98vjvSV9V8f6zdqVy9LNzMzMzMzMrO0p9ScHBH0OHBMRq0TEqGXbgIjoWQKYHwGnkhmeY1dew4FNs07m4KaZmZmZmZmZtbWSmQmApAeAv5O9N08Cdo2IuSKil6TvKwHMnkB/cpq6mdXEZelmZmZmZmZm1nYiIlSCIhExCvAdMJKkz8tj4wLHAqsBz5Hl6ncCtwLLAX8Fekuar4blm1nh4KaZmZmZmZmZta2IWA/YBpgUuAbYV9Jble1/APYF5gH6kVWwIwKPA+tLeioiekr6vrPXbmYObpqZmZmZmZlZm2kEIyNiEzJw+SE5+XwF4EZJq3bwnIWB35Dl6O8Ct0n6sJoBamadz8FNMzMzMzMzM2s7ETES8BY59fwQSa9HxE7AmsAfgN+Tg4U+AZ5oTEo3s9bSq+4FmJmZmZmZmZnVYEPgY+BcSa+Xx+4CNgEeA8YFRgZeAE6JiLNKpmYPBzrNWoenpZuZmZmZmZlZOxoBGIXMzGz4IzAlcCGZvbks8AVwILAigAObZq3FmZtmZmZmZmZm1o4+BSYE/hARvYEZgJ2Bw8mhQt8CRMQd5KChgyPiUkmfDOL1zKwGDm6amZmZmZmZWTs6B1gKOBoQ2V+zB3BfJbDZS9JXEfEgMCswHj/O9DSzmjm4aWZmZmZmZmZtJSJGkNQvIrYDLiOHB90L/BYYtezTR1LfiOhDBj+/I0vUzayFOLhpZmZmZmZmZt1edRCQpH7l93eAi8svIqIXcFxEvCzpgfLUxYBVgaslveOBQmatJSTVvQYzMzMzMzMzs+EuIkYihwS9C7wPvCypf2X7yMBNwALA9UBfYD7gPWABSd86uGnWWjwt3czMzMzMzMy6rYjoUX5fH7gduAi4AzgfWKmyX0j6GtgNOAOYGvgzcBXwlxLY7OnApllrceammZmZmZmZmXVLJWCpiJgLuBF4CPg32T9zG2BGYDlJtzf2Lc8bFRgFGCDpg5qWb2aDwcFNMzMzMzMzM+vWIuJmcijQVpJeLI/NRJagPyhp5fJYAMjBErMuw2XpZmZmZmZmZtZtRcTcwFTANcCr5bGQ9CxwIrBURMzb2L9keo4SET1rWbCZDREHN83MzMzMzMysO5uY7J/5nKT+jezM4nJgALAk/BDY7AOsBhwVEb07fbVmNkQc3DQzMzMzMzOz7uy/wHXAK5ABzEbZuaRnyF6cy1f2nxU4GOgr6bumYKiZtRgHN83MzMzMzMysW4mI0Rt/lvSapGUpJekduA6YKyKmLYOENgB6ATsN94Wa2a/m4KaZmZmZmZmZdTc3R8Q1ETFN4wFJ3w9i33vJ0vRlgfmBzYDdJA2IiF4eLmTW2jwt3czMzMzMzMy6jYgYDdgBWA8YHzgMOEzS5z/znLuB3sBbwIySZu6MtZrZr+fgppmZmZmZmZl1KxExItk7c0NgbeATYBfg3x1lcEbEVsAx5cuFJd1Tsjb7d9aazWzouCzdzMzMzMzMzLoVSd9KehjYnQxufgNcCFweEfM39qsMC7q//H55CWyGA5tmXYMzN83MzMzMzMysW4mIEST1i4gVgE2ByYE
JgRGA0YAzgH0lvVl5zpTAB5K+ioieP9Oj08xaiDM3zczMzMzMzKzbiIgeJbA5EXAe8CawMjApsBxwMLAi8GhEbFUmpDemqn9V/uzAplkX4cxNMzMzMzMzM+t2IuIgYCNgSUmPVR4fHVgKOJvM5HwD2ErStbUs1Mx+lV51L8DMzMzMzMzMbDj4HBgV+BAgIvpI6lumpl8UERMAmwFfkUFOM+uCXJZuZmZmZmZmZt3R88CIwFoAkvpCBjnL9g+B/sBGkq6oY4Fm9us5c9PMzMzMzMzMuh1J/46Ia4C9ImIM4AxJL0rqGxEjAuMAIwMf1bpQM/tV3HPTzMzMzMzMzLqViOhNVquOCZwPLALcD9wG/AdYHlgbuEbS+hERcoDErEtycNPMzMzMzMzMurSI6Cnp+4iYB1gdWIzMyrwSOAtYGtgamJjsrzkAuApYV9JXZcL6gHpWb2a/hoObZmZmZmZmZtZlVQKbUwJ3AKMDTwC9gZnJDM69gJOBWYEAvgaek9Sv8fxaFm9mv5qDm2ZmZmZmZmbW5UXEtcAEwM6SbouIsYHZyPLzTYBrgXUkfVbjMs1sGPNAITMzMzMzMzPr0iJiGmBuMjvzLgBJHwN3RMTzwKfAjsCfgItqWqaZDQc96l6AmZmZmZmZmdmv9DnQE+ghqR9kuTqApHcl7Qy8BaxU3xLNbHhwcNPMzMzMzMzMurovgeeAjSLitwClD2dPgIgYDXgFGD0i+tS3TDMb1hzcNDMzMzMzM7MuTdI3wFHAuMDhEbFiRIxdGRQ0OzAV8KSkvhERda3VzIYtDxQyMzMzMzMzs24hItYjg5wB3Aj8FxgArA+MDExbgps9JA2ob6VmNqw4uGlmZmZmZmZm3UZETArsDqwFjEQOU74UOEHSfyKil6T+da7RzIYdBzfNzMzMzMzMrNuJiPGByYCPgdedqWnWPTm4aWZmZmZmZmZmZl2SBwqZmZmZmZmZmZlZl+TgppmZmZmZmZmZmXVJDm6amZmZmZmZmZlZl+TgppmZmZmZmZmZmXVJDm6amZmZmZmZmZlZl+TgppmZmZmZmZmZmXVJDm6amZmZmZmZmZlZl+TgppmZmZmZmZmZmXVJDm6amZmZ2TAREa9FxJlD+dzbI+L2Ybuiwf67e0XEvyLizYgYEBFX1LEOMzMzMxtyvepegJmZmZl1johYAFgCOFLSpzUvp5VsBOwEHAk8CrwxPP6SiFgLGF/SkcPj9c3MzMzaUUiqew1mZmZm1gkiYkfgEGAqSa8Nh9fvAwyQ1G8ontsbQNJ3w3pdg/F3XwgsJGnS4fz3XAPMKmnK4fn3mJmZmbUTl6WbmZmZ2U9ERI+IGHFIniOp79AENstzv6sjsFmMD3xa09/9q0XEyHWvwczMzKwuDm6amZmZtYGI2IfM2gR4NSJUfk1Ztisijo2ItSPiaaAv8KeybceIuDciPoqIbyLikYhYpYO/40c9NyNig/K6C0bE4RHxQUR8FRGXR8R4Tc/9Uc/NiFikPHe1iNg9It6KiG8j4taImLaDv3uriHilrO/BiFj4l/p4RsSUESFgUWCWynuySNneIyL+FhFPl7/7vYg4KSLGanqd5SPi2oh4OyL6RsTLEbFnRPSs/vuAZYApKn/Pa03v05RNr7tIdT2V9+mpiJg7Iu6MiK+Bf5RtfSJi34h4qazjzdJLtE/T6y4eEXdHxKcR8WVEPB8R/xjU+2RmZmbWytxz08zMzKw9XAZMD6wJbAd8WB7/oLLPYsBqwLFl+2vl8b8CVwHnAb2BNYBLImJZSdcOxt99DPAJsC8wJfC38nesPhjP3RUYABwKjAHsXNYxX2OHiNiivN5dwBHl77ii/J1v/cxrfwCsC+wOjArsVh5/tvx+ErABcAZwNDAVsDUwZ0QsWMlS3QD4Eji8/L4YsB8wOtnLE+DAsv5Jyfefsu/QGAe4HrgQOBd4LyJ6kP9HCwEnl3/DbOXvmh5YASAiZgGuAZ4E9iKD2NMCCw7lWszMzMx
q5eCmmZmZWRuQ9GREPEoGN68YRM/NGYDZJD3T9Pj0kr5pfBERx5KDd7YHBie4+RGwhEqz9xKI2zYixpD02S88d0RgjkbJekR8AhwVEbNKeqr06twfeAhYTFL/st+TwJn8THBT0lfAuRGxCfC9pHMr/8aFgE2AtSWdX3n8P8ANwKpA4/G1qu8PcGJEnAhsGRF7lHL9myPif8BY1b9nKE0IbC7ppMq61gH+CPxe0t2Vx58q61lA0r3A4mSAeilJH2JmZmbWxbks3czMzMwa7uggsElTYHMsMgPxLmCuwXzdkxuBzeIuoCcwxWA894ymXpx3ld+nLr/PQ2YyntIIbBbnkZmbQ2tV4DPg5ogYt/ELeITMuFy0sWPT+zNa2e8uYGRgxl+xhkHpS2aTNq/3WeC5pvXeVrY31vtp+X35EmQ2MzMz69KcuWlmZmZmDa929GBELAvsAcwBVPs3qqP9O/BG09eNoONYzTsOxXMbAdKXqjtJ6t/oaTmUpiODuO8PYvv4jT+UUu8DyHL00Zv2G+NXrGFQ/tfB8KXpgJn4cZuBqsZ6LyIzUk8FDoqIW8mWBZdKGjAc1mpmZmY2XDm4aWZmZmYN3zQ/EBELk70c7wS2BN4B+gEbAmsN5ut+P4jHYzg/99foQQY21x7E9g8AImJM4A7gc7KH5cvAt2RW68EMXqXUoILEPQfx+E/+n8rf81+yVUBH3oTMMo2I35GZnMuQQ6NWB26LiCUkDer9NjMzM2tJDm6amZmZtY/BzbSsWpkM1i0pqW/jwYjYcJit6td5vfw+LfCfxoMR0YscLPTkUL7uy2QPy3ua+mk2W4Qsi19J0p2Vv3+qDvYd1PvfyEYds+nxwSnbb3gZmB24takFwE8XkRmat5Zf20fE38mBR4sCtwzB32lmZmZWO/fZMTMzM2sfX5XfxxyC53xPBuV+yCKMiCkp07dbwMPkwKJNS0CzYW0Gr+x9UC4m/817Nm+IiF4lYxMGZpZGZXtvMsu12Vd0XKb+cvn9d5XX6AlsNoTrnQTYtIP1jhQRo5Q/j93Bcx8vv/fpYJuZmZlZS3PmppmZmVn7eKT8fmBEXEiWl19dpoYPyrVkqfMNEXE+2btxK7LH5W+G52IHh6TvImIf4BiytPpiMmNzAzJoODTZqki6IyJOAnaLiDmAm8j3azpyeM9fgUuBe8nMy7Mi4ujy961Lx2XzjwCrR8Th5HT3LyVdLenpiLgf+GcJPn4MrMGQnaufA6xGTkZfFLiHDM7OWB5fkgwE71XK0q8ls17HJwOxbwF3d/C6ZmZmZi3NwU0zMzOzNiHpoYjYE9ic7LXYA5iKgRmdHT3ntojYGNgVOJIcOrQLGUCsPbgJIOnYiAhgB+BQ4AlgOeBosqR+aF9384h4BPgL8A+gP/AacC4ZPETSR2Xg0mHkUKFPyvZbgRubXvJ4cijThsB2ZHDx6rJtbeAk8n3+FDiNLLO/eTDXOiAiViivux6wIvA18ApwFPBC2fUq8v9uI2Bc4EOyZ+jekj4bnL/LzMzMrJXEL7TkMTMzMzPrciKiBzn05zJJPynVNjMzM7PuwT03zczMzKxLi4gRS+Zm1XrA2MDtnb8iMzMzM+ssztw0MzMzsy4tIhYBjgAuIYcLzQVsDDwLzC3pu9oWZ2ZmZmbDlXtumpmZmVlX9xrwJrAtma35MXA2sKsDm2ZmZmbdmzM3zczMzMzMzMzMrEtyz00zMzMzMzMzMzPrkhzcNDMzMzMzMzMzsy7JwU0zMzMzMzMzMzPrkhzcNDMzMzMzMzMzsy7JwU0zMzMzMzMzMzPrkhzcNDMzMzMzMzMzsy7JwU0zMzMzMzMzMzPrkhzcNDMzMzMzMzMzsy7p/wFopG5thc+kvAAAAABJRU5ErkJggg==", + "image/png": 
"iVBORw0KGgoAAAANSUhEUgAABTcAAAObCAYAAACGurUMAAAAOXRFWHRTb2Z0d2FyZQBNYXRwbG90bGliIHZlcnNpb24zLjcuNCwgaHR0cHM6Ly9tYXRwbG90bGliLm9yZy8WgzjOAAAACXBIWXMAAA9hAAAPYQGoP6dpAADchUlEQVR4nOzdd5htZ1k34N+TSk+ooQQIivTeg4KhIx0BaUoVpEgRUIhIbxGQZgQEqdKRqkDovYQuUj9a6KGTUBMgz/fHu4bsDHPanJkzs8657+va18xebb97zZ611/qtt1R3BwAAAABgbvba6AIAAAAAAKyGcBMAAAAAmCXhJgAAAAAwS8JNAAAAAGCWhJsAAAAAwCwJNwEAAACAWRJuAgAAAACzJNwEAAAAAGZJuAkAAAAAzJJwEwCAdVVV966qz1bVL6uqq+q+G12mOZj21bvWaFuXq6q3VtUPpu1+cpr+/On5IWvxOgAAu5pwEwAgSVU9eAp5uqouuNHl2V1U1a2SPDXJr5I8JckjknxoF732YdPf8+G74vU2q6o6Q5I3JLlCkpdl/A2euaGFAgBYI/tsdAEAADZaVVWSv07SSSrJXZI8YEMLtfu4wdLP7v72hpZkfi6c5BdrsJ0rJDlbkgd392PXYHsAAJuGmpsAAMm1kxyS5AVJjk1y+6rab0NLtPs4Z5IINndcd3++u7++Bps65/TT3wAA2O0INwEARk3NJHl2khcnOUuSmy4uUFVHTU2cL7nSBqrqltP8Jy6bfqaqelxVfW7qc/K4qnp7VV17hW3cYdrGHarqulX1rmn5XljmJlX1oqr6f1X18+nxsalfyxXP7arqAlX1qqr68bT8B6rq+ouvt8I6B1fVkVX1lao6oap+WFWvr6rLb2NfLq3/8KncV5ueLzX572XLXWjq9/EbVXViVX23ql6yUtcA0/s4oqo+WlXfn8r1tap6VlUdvGzZ5yd55/T0YYuvX1WHLZZx6fmy9Q+Z5j1/+Xan6X9QVfeqqk9Nf9d3LSyz3X/zbezD3+tzc7HMVXXzqvpwVf2iqn5UVS+rqnMtfw8ZoX2SPG9hH9xhK6+71eb8VXVMVR2zhXm3rqp3VtVPqupX0z74p6raf0vvr6rOMv0NvzP9TT9TVXfcSvmuXVX/XVXfm5b/RlW9rqquucKy16mqN9boa/SEqvpyVT2hqg7c0vYBgHnRLB0A2KNV1UFJbpTk/3X3B6rq+CT3T3LXJC9fWPQFSa6T5HbT/OVuP/18/sK2z5vkXRm1Qt+b5Kgkp81oqn1UVf1Ndz97hW3dPMl1k7wpo2/E8y7MOyLJSUmOTvKtJAckuXpGv5aXT/JXy97fhZJ8IMkZM/pd/FSSP0jymiRv3MI+uUyStyQ5U5I3J3l1RuB7kyTvq6qbdveK6y541/TzDlP5H7HC61x32va+Sf47yZeSHJzkz5Ncv6qu1t0fX1jlz5PcLSO0/ECSE5NcNKNLgRtW1eW6+1vTsq+dft4+ybsXypMkx2yj7NvjqUmukrFP35jkt9N7Wu3ffEfdI+Nz+/qM93fFJLdMcsmqulR3n5DkJxn7/VJJbpzkdUk+Oa3/yayxqnpukjsm+WaSV02vf6Ukj0pyjaq6Vnf/ZtlqByZ5f8bf8r+S7J/kFkmeW1UndfcLFheuqkckeWiSn2X8jb+RUTP1ykn+MsnbFpZ9WJKHJ/lRkv9J8r0kl8jocuJ6VXVodx+/Jm8eANg43e3h4eHh4eHhscc+kjwoo6/NwxemfTQjQDz/wrRTZYQ1xybZZ9k2zp7kN0k+tmz6u6bt3GrZ9AMzwqVfJjloYfodprKclOS6WyjvH64wba+M8LWTXHHZvLdP0+++bPqfTdM7yR0Wpu+TETL+KsmfLlvnnBmB6neS7L+d+/dd45Tz96afMcmPk/wgyUWWzbtYRnj18WXTz7XS62Z0K/DbJM9YNv2w6f09fAtle/g0/7AV5h0yzXv+sunPn6Z/K8n5tvB+t/tvvo1910n
etYUyH5/k4svmvWSa9xfLpi99ru6wwmssvZ9DdmC/HZPkmC28xquTnHoLZb7PCu+vk/xHkr0Xpl8k4//psyv8nTvJV5Kca4VyHbzw+9WmZT+Q5MAtlPXJ2/N38PDw8PDw8NjcD83SAYA9VtXvBhI6KckLF2Y9PycPLJQk6e5fJXlFkoMyanAu+sske+fk5r+p0Xz9T5O8qrtftrhwd/8kycMyAtObrVC013X3USuVubu/vMK0kzJqEmaxbFV17oxanV9K8u/L1nlTFmq5Lbh+kj9M8q/d/e5l63w7yeMzwtxrrFS+HXC7jMDvYd392WWv8+mMLgIuXVUXWZj+rR41ErNs+bck+Ux+/++ynh7f3V9dnLCTf/Md9bTu/r9l05ZqhF5hDba/o+6TEUjeqbt/uWzeo5L8MMltV1jvF0nu192/XZowfR7en+TCVXW6hWXvNf28f59cQzcL631z4em9p593mfb94nLPzwiaVyoPADAzmqUDAHuyq2cEeW9eFpa8JMm/JLlDVf1Td/96mv78jMDz9hnNkZfcPsmvp/WWHDr9PGALfReedfp54RXmfXhLBa6qMyf5+yTXy2heftpli5xr4fdLTT8/OAWgy70vyfJ+CpfKfd4tlPuPpp8XzhaatW+npde55BZe5wILr/PZ5Hdh9G0zat5dMqP2594L65y4E+XZUSv9jXbmb76jPrrCtG9MP8+4BtvfblV1moy/xw+S3Hf8mX7PCVn5fX+xV24avvhefjb9fqWMGpcrBv/LHJrxP3mLqrrFCvP3S3LWqjpzd/9wO7YHAGxSwk0AYE921+nn8xcndvePquq/M2rY3TijL8D06JPz/yW5UVWdsbt/PPVPebEkr+3uHyxs5szTz2tNjy053QrTjl1pwWkQlI8kOV9GuPbCjP4Ef5NRC/I+GX0WLjlg+vndLbz2StOXyr1SILRopXLviKXXuctWlzrl6zwpyX0zmsW/OaNp+FItwTvklH2TrreV/kY78zffUT9ZYdpSf5Z7rzBvPZ0xo6bzWTNqp+6In2xh+krv5cAkP16hZuhKzpxxrbOt8pwuo1YpADBTwk0AYI9UVWfNGCAnSV5aVS/dwqJ3zRRuTl6Y5NEZg7c8MycPJPSCZesdN/28T3c/bQeL11uY/tcZweYjuvvhizOq6tCMcHPRUo24g7awvZWmL5X7xt39+m0XddWWXueS3f2pbS1cVWfLaGr86SRX7u6fLpt/61WUYak260rnxAduY92V/kY78zffTLa2X5Kxb36y8HzpfX+iuy+zTmXK9JpnrqpTb0fAeVySvbr7TOtYHgBgE9DnJgCwp7p9RtPUjyV5zhYe309yzao638J6L8wIf25fVfsmuXVGc9zFZupJ8qHp51XWsMznn36+aoV5f7rCtE9OPw+tqpXO+/5khWnrUe6V7Ojr/EHGuetbVgg2D57mL7fUj+OWajL+ePp57hXmXW47y7VoV+279bbF/VJV58/JNYKTJN39s4w+Ty9aVesZJn4oo4bodbdz2TNW1UXXsTwAwCYg3AQA9lRLzaHv0d1/vdIjYxCepUGHkiTd/Y0k78jo/+8+GU1xX7LQL+fSch9N8t4kf15Vd1qpAFV18alG4vY6Zvp52LLtXDrJ4csX7u6vZ4zeff4kf7Nsnevm9/vbTJLXJflykntW1fW2UO5Dp34Wd8bzMmriPayqfm8AnKraq6oOW5h0zPTzT6pq74XlTpcxkM5KtQyXmhufZwtlWOo3845V9bv1p4GYHrrNd7DMOv3NN8LnM2r93nixrFV16iRbqpH6pIybBc+duk84hao649SFw8741+nnv1TVuZbPXDbtydPPZ1fVOVdY9rRVdaWdLA8AsAlolg4A7HGm0OwCSf6vu7c4eE9G7c0HZ4RfD+vupX4AX5ARDD524flKbpMRhD6nqu6d5OiMQO/gJJfI6Kvz0CTf286ivzBjMKGnVNXVknwxY4CfGyR5dUZT+eXumTHy9NOnsPJTGbU
cb5YRZN44JzdDTnf/uqr+PKNPyzdU1QcyaoD+IqMm3+Wn9c8xTVuV7v5hVd08yWuSfKiq3p5R+6+n1zk0o9/EU03LH1tVL0tyqySfrKq3ZNQgvFaSX01lvNSyl/lCRr+ct6qqXyf52rT9/+zur3X30VX1niRXTfLhqnpHRlP9G07vf6Uanduy1n/zXW76DDw1yUOSfKKqXpNx3XCtJN+eHsvXeW5VXTbJPZJ8uarenOTrSc6U0ZXCVTMC7bvtRLneUlWPTvJPST5XVa/NGHjooIxayB/K6Hs13f32qnpQkscl+WJVvTHJVzP62DxvRk3n92X7aoECAJuYcBMA2BMt1dr8j60t1N3HVNXbMkKdG2YEcckIEv8tyRmSfLq7P76F9b85BT73yggTb5vRRPrYjBHA/zXJ/21vobv721V1lSRHZIQ518moZXePJG/LCuFmd3926o/zsRmjw189I+C8acbo1TfOyX1zLq3zqaq6ZJL7ZQSnd8wIQL+T5BMZg7QsDp60KlMAdYkkD5jey1UyRjz/dkZAuLz5/Z2TfGV6n/fM6Dbg9Rm1LH+vqX53/7aqbpqxv26R5PQZNXHflxF0JuP9P2H6ea+MwPgfkrwlyV+s4j2t6d98Az0sI7y+S0a/s8cmeVmSh2cavX657r5nVb0pI8C8ZkbfnD/KCDmfkORFO1uo7n5IVX0wo//VGyQ5bUZQ/NGM8H9x2X+uqvdPy/5Jxt/4uIzA+1lJXrKz5QEANl51b6m/egAAdmdV9eKMmoYX6u4vbHR5AABgR+lzEwBgNzb1XXn2FaZfI6MG5GcFmwAAzJVm6QAAu7f9knyjqt6Z0YT9N0kumtHU/sSM5t0AADBLmqUDAOzGppHFn5LR1+bBSU6T0V/me5Ic0d2f2LjSAQDAzhFuAgAAAACzpM9NAAAAAGCW9Lm5DqqqkpwzyU83uiwAAAAAMFOnT/Lt3krTc+Hm+jhnkm9udCEAAAAAYOYOTvKtLc0Ubq6PpRqbB0ftTQAAAADYUafPqDy41WxNuLm+ftrdx290IQAAAABgTkavj9tmQCEAAAAAYJaEmwAAAADALAk3AQAAAIBZEm4CAAAAALMk3AQAAAAAZkm4CQAAAADMknATAAAAAJgl4SYAAAAAMEvCTQAAAABgloSbAAAAAMAsCTcBAAAAgFkSbgIAAAAAsyTcBAAAAABmSbgJAAAAAMyScBMAAAAAmCXhJgAAAAAwS8JNAAAAAGCWhJsAAAAAwCwJNwEAAACAWRJuAgAAAACzJNwEAAAAAGZJuAkAAAAAzJJwEwAAAACYJeEmAAAAADBLwk0AAAAAYJb22egCAAAAwCEPekNvdBk2o2OOuH5tdBkANjM1NwEAAACAWRJuAgAAAACzJNwEAAAAAGZJuAkAAAAAzJJwEwAAAACYJeEmAAAAADBLwk0AAAAAYJZmH25W1T2r6piq+lVVHV1VV9jKsnepqvdW1Y+nx9uWL1/DI6vqO1X1y2mZP1r/dwIAAAAA7IhZh5tVdcskT0ryiCSXSfK/Sd5cVWfbwiqHJXlpkqslOTTJN5K8parOtbDMPyS5d5K7Jblikp9P2zzVerwHAAAAAGB1Zh1uJrlfkmd39/O6+7MZgeQvktxppYW7+7bd/fTu/mR3fz7JX2fsg2sko9ZmkvsmeXR3v667P5XkdknOmeQmWypEVe1fVWdYeiQ5/Zq9QwAAAABgRbMNN6tqvySXTfK2pWndfdL0/NDt3Mxpkuyb5EfT8/MlOfuybR6X5OhtbPPwJMctPL65na8PAAAAAKzSPhtdgJ1wliR7J/nusunfTXKh7dzGPyf5dk4OM8++sI3l2zx7tuxxGc3jl5w+Ak5gD3XIg97QG12GzeqYI65fG10GAACA3cmcw82dUlUPSnKrJId19692ZlvdfUKSExa2vZOlAwAAAAC2ZbbN0pP8IMlvkxy0bPpBSY7d2opV9YAkD0py7alfzSVL6+3wNgEAAACAXWu24WZ3n5j
kY5kGA0qSqloaHOiDW1qvqv4hyUOSXLe7P7ps9lczQszFbZ4hY9T0LW4TAAAAANj15t4s/UlJXlBVH03y4YyRzk+b5HlJUlUvTPKt7j58ev7AJI9Mcpskx1TVUj+aP+vun3V3V9VTkvxTVX0xI+x8VEa/nK/dVW8KAAAAANi2WYeb3f3yqjprRmB59iSfzKiRuTQg0HmSnLSwyt2T7Jfkv5Zt6hFJHj79/viMgPRZSQ5M8r5pmzvVLycAAAAAsLZmHW4mSXcfmeTILcw7bNnzQ7Zje53kodMDAAAAANikZtvnJgAAAACwZxNuAgAAAACzJNwEAAAAAGZJuAkAAAAAzJJwEwAAAACYJeEmAAAAADBLwk0AAAAAYJaEmwAAAADALAk3AQAAAIBZEm4CAAAAALMk3AQAAAAAZkm4CQAAAADMknATAAAAAJgl4SYAAAAAMEvCTQAAAABgloSbAAAAAMAsCTcBAAAAgFkSbgIAAAAAsyTcBAAAAABmSbgJAAAAAMyScBMAAAAAmCXhJgAAAAAwS8JNAAAAAGCWhJsAAAAAwCwJNwEAAACAWRJuAgAAAACzJNwEAAAAAGZJuAkAAAAAzJJwEwAAAACYJeEmAAAAADBLwk0AAAAAYJaEmwAAAADALAk3AQAAAIBZEm4CAAAAALMk3AQAAAAAZkm4CQAAAADMknATAAAAAJgl4SYAAAAAMEvCTQAAAABgloSbAAAAAMAsCTcBAAAAgFkSbgIAAAAAsyTcBAAAAABmSbgJAAAAAMyScBMAAAAAmCXhJgAAAAAwS8JNAAAAAGCWhJsAAAAAwCwJNwEAAACAWRJuAgAAAACzJNwEAAAAAGZJuAkAAAAAzJJwEwAAAACYJeEmAAAAADBLwk0AAAAAYJaEmwAAAADALAk3AQAAAIBZEm4CAAAAALMk3AQAAAAAZkm4CQAAAADMknATAAAAAJil2YebVXXPqjqmqn5VVUdX1RW2suxFq+pV0/JdVfddYZmHT/MWH59f1zcBAAAAAOywWYebVXXLJE9K8ogkl0nyv0neXFVn28Iqp0nylSQPSnLsVjb9mSTnWHj8yVqVGQAAAABYG7MON5PcL8mzu/t53f3ZJHdL8oskd1pp4e7+SHf/fXe/LMkJW9nub7r72IXHD9a+6AAAAADAzphtuFlV+yW5bJK3LU3r7pOm54fu5Ob/qKq+XVVfqaoXV9V5tlGW/avqDEuPJKffydcHAAAAALZhtuFmkrMk2TvJd5dN/26Ss+/Edo9Ocock101y9yTnS/LeqtpaYHl4kuMWHt/cidcHAAAAALbDnMPNddHdb+ruV3b3p7r7zUmul+TAJH+xldUel+SAhcfB615QAAAAANjD7bPRBdgJP0jy2yQHLZt+ULY+WNAO6e6fVNX/S3L+rSxzQhb68KyqtXp5AAAAAGALZltzs7tPTPKxJNdYmlZVe03PP7hWr1NVp0vyh0m+s1bbBAAAAAB23pxrbibJk5K8oKo+muTDSe6b5LRJnpckVfXCJN/q7sOn5/sluci07n5JzlVVl0rys+7+0rTME5P8d5KvJTlnkkdk1BB96a55SwAAAADA9ph1uNndL6+qsyZ5ZMYgQp9Mct3uXhpk6DxJTlpY5ZxJPrHw/AHT491JDpumHZwRZJ45yfeTvC/Jlbr7++vzLgAAAACA1Zh1uJkk3X1kkiO3MO+wZc+PSbLVDjG7+1ZrVTYAAAAAYP3Mts9NAAAAAGDPJtwEAAAAAGZJuAkAAAAAzJJwEwAAAACYJeEmAAAAADBLwk0AAAAAYJaEmwAAAADALAk3AQAAAIBZEm4CAAAAALMk3AQAAAAAZkm4CQAAAADMknATAAAAAJgl4SYAAAAAMEvCTQAAAABgloSbAAAAAMAsCTcBAAAAgFkSbgIAAAAAsyTcBAAAAABmSbgJAAAAAMyScBMAAAAAmCXhJgAAAAAwS8JNAAAAAGCWhJsAAAAAwCwJNwEAAACAWRJuAgAAAAC
zJNwEAAAAAGZJuAkAAAAAzJJwEwAAAACYJeEmAAAAADBLwk0AAAAAYJaEmwAAAADALAk3AQAAAIBZEm4CAAAAALMk3AQAAAAAZkm4CQAAAADMknATAAAAAJgl4SYAAAAAMEvCTQAAAABgloSbAAAAAMAsCTcBAAAAgFkSbgIAAAAAsyTcBAAAAABmSbgJAAAAAMyScBMAAAAAmCXhJgAAAAAwS8JNAAAAAGCWhJsAAAAAwCwJNwEAAACAWRJuAgAAAACzJNwEAAAAAGZJuAkAAAAAzJJwEwAAAACYJeEmAAAAADBLwk0AAAAAYJaEmwAAAADALAk3AQAAAIBZEm4CAAAAALMk3AQAAAAAZkm4CQAAAADMknATAAAAAJgl4SYAAAAAMEvCTQAAAABglmYfblbVPavqmKr6VVUdXVVX2MqyF62qV03Ld1Xdd2e3CQAAAABsjFmHm1V1yyRPSvKIJJdJ8r9J3lxVZ9vCKqdJ8pUkD0py7BptEwAAAADYAPtsdAF20v2SPLu7n5ckVXW3JNdPcqckRyxfuLs/kuQj07K/N3812wQAgM3gkAe9oTe6DJvVMUdcvza6DADA+phtzc2q2i/JZZO8bWlad580PT90V26zqvavqjMsPZKcfjWvDwAAAABsv9mGm0nOkmTvJN9dNv27Sc6+i7d5eJLjFh7fXOXrAwAAAADbac7h5mbyuCQHLDwO3tjiAAAAAMDub859bv4gyW+THLRs+kHZwmBB67XN7j4hyQlLz6t06QMAAAAA6222NTe7+8QkH0tyjaVpVbXX9PyDm2WbAAAAAMD6mHPNzSR5UpIXVNVHk3w4yX2TnDbJ0kjnL0zyre4+fHq+X5KLTOvul+RcVXWpJD/r7i9tzzYBAAAAgM1h1uFmd7+8qs6a5JEZA/58Msl1u3tpQKDzJDlpYZVzJvnEwvMHTI93JzlsO7cJAAAAAGwCsw43k6S7j0xy5BbmHbbs+TFJttkh5ta2CQAAAABsDrPtcxMAAAAA2LMJNwEAAACAWRJuAgAAAACzJNwEAAAAAGZJuAkAAAAAzJJwEwAAAACYJeEmAAAAADBLwk0AAAAAYJaEmwAAAADALAk3AQAAAIBZEm4CAAAAALMk3AQAAAAAZkm4CQAAAADMknATAAAAAJgl4SYAAAAAMEvCTQAAAABgloSbAAAAAMAsCTcBAAAAgFkSbgIAAAAAsyTcBAAAAABmSbgJAAAAAMyScBMAAAAAmCXhJgAAAAAwS8JNAAAAAGCWhJsAAAAAwCwJNwEAAACAWRJuAgAAAACzJNwEAAAAAGZJuAkAAAAAzJJwEwAAAACYJeEmAAAAADBLwk0AAAAAYJaEmwAAAADALAk3AQAAAIBZ2mejCwCwqx3yoDf0RpdhszrmiOvXRpcBAAAAtpeamwAAAADALAk3AQAAAIBZEm4CAAAAALMk3AQAAAAAZkm4CQAAAADMknATAAAAAJgl4SYAAAAAMEvCTQAAAABgloSbAAAAAMAsCTcBAAAAgFkSbgIAAAAAsyTcBAAAAABmSbgJAAAAAMyScBMAAAAAmCXhJgAAAAAwS8JNAAAAAGCWVhVuVtV5qupPlk27ZFW9sKpeXlU3WZPSAQAAAABswT6rXO9pSU6X5JpJUlUHJXlnkv2S/DTJzavqFt396jUpJQAAAADAMqttln6FJG9deH67JKdOcskk50ry9iQP2LmiAQAAAABs2WrDzTMl+d7C8xskeXd3f7m7T0ry6iQX2tnCAQAAAABsyWrDze8nOW+SVNWBSa6U5M0L8/fJ6pu8AwAAAABs02oDyLcluXdVHZ/ksIyQ9LUL8y+S5Bs7VTIAAAAAgK1Ybbj5oCQXSPLEJCcmeUB3fzVJqmr/JH+R5CVrUkIAAAAAgBWsKtzs7u8m+eOqOiDJL7v7xIXZeyW5RtTcBAAAAADW0U71i9ndx60w7ZdJ/ndntgsAAAAAsC2rHVAoVXWeqnpmVX2hqn5cVVedpp+lqp5
WVZdeu2ICAAAAAJzSqmpuVtVFkrw3Ixw9Osn5l7bV3T+oqj9Jctokd16jcgIAAAAAnMJqa24+PslPMgYV+ssktWz+G5JcZfXF2n5Vdc+qOqaqflVVR1fVFbax/C2q6vPT8v9XVddbNv/5VdXLHket77sAAAAAAHbUasPNqyZ5Rnd/P0mvMP/rSc616lJtp6q6ZZInJXlEkstk9PX55qo62xaWv3KSlyZ5TpJLJ3ltktdW1cWWLXpUknMsPG69HuUHAAAAAFZvteHmXkl+sZX5Z01ywiq3vSPul+TZ3f287v5skrtN5brTFpa/T5KjuvsJ3f257n5Iko8n+dtly53Q3ccuPH68bu8AAAAAAFiV1Y6W/vEk10/y9OUzqmqfJLdK8qGdKNc2VdV+SS6b5HFL07r7pKp6W5JDt7DaoRk1PRe9OclNlk07rKq+l+THSd6R5J+6+4dbKcv+SfZfmHT67XkPsCWHPOgNK9WIJskxR1x/eTcYAAAAwB5qtTU3H5fkulX1jCRLTboPqqprJnlLkgsnOWINyrc1Z0myd5LvLpv+3SRn38I6Z9+O5Y9Kcrsk10jywCR/muRNVbX3VspyeJLjFh7f3I7yAwAAAAA7YVU1N7v7TVV1hyRPTXLXafKLMgYWOj7J7br7PWtSwl2su1+28PT/qupTSb6c5LAkb9/Cao/LKWuEnj4CTgAAAABYV6ttlp7u/s+qenWSayc5f0Yt0C8neXN3/3SNyrc1P0jy2yQHLZt+UJJjt7DOsTu4fLr7K1X1g4z3uGK42d0nZKGP0SqtZgEAAABgva063EyS7v55ktesUVl29LVPrKqPZTQff22SVNVe0/Mjt7DaB6f5T1mYdq1p+oqq6uAkZ07ynZ0uNAAAAACwZlYVblbVebZnue7++mq2vwOelOQFVfXRJB9Oct8kp03yvCSpqhcm+VZ3Hz4t/9Qk766q+yd5Q8bAR5fL1LS+qk6X5GFJXpVRm/MPkzw+yZcyBh4CAAAAADaJ1dbcPCbJ9ozmvLVBeHZad7+8qs6a5JEZgwJ9Msl1u3tp0KDzJDlpYfkPVNVtkjw6yWOTfDHJTbr709Miv01yiSS3T3Jgkm9nDJD0kKnpOQAAAACwSaw23LxTfj/c3DvJIRkjjX8vyb+tvljbr7uPzBaaoXf3YStMe2WSV25h+V8muc5alg8AAAAAWB+rHS39+VuaV1X/nOToJAesskwAAAAAANu011pvcBpk6HlJ/m6ttw0AAAAAsGSnRkvfir0y+sAEAADYLRzyoDdsz7gDe5xjjrh+bXQZANhzrWm4WVVnSHLVJH+f5BNruW0AAAAAgEWrCjer6qRsebT0SvL1JPdYbaEAAAAAALZltTU3H5nfDzc7yY+TfDnJW7r7NztTMAAAAACArVntaOkPX+NyAAAAAADskDUfLR0AAAAAYFfYrpqbVfXcVWy7u/vOq1gPAAAAAGCbtrdZ+tWz5QGEtmRHlwcAAAAA2G7bFW529yHrXA4AAAAAgB2iz00AAAAAYJaEmwAAAADALK063KyqP6uqt1bVD6vqN1X12+WPtSwoAAAAAMCiVYWbVXWzJP+T5KAkL5u289Lp918m+VSSR65RGQEAAAAAfs9qa24enuTDSS6d5GHTtOd2922TXCzJOZJ8deeLBwAAAACwstWGmxdJ8rLu/m2S30zT9k2S7j4mydOTPHCnSwcAAAAAsAWrDTd/keTEJOnunyQ5IaO25pLvJjnfTpUMAAAAAGArVhtufiGj9uaSTyb5q6rap6pOleQ2Sb6+k2UDAAAAANii1Yabr0ly46raf3r+mCSHJflJku8nuUqSI3a2cAAAAAAAW7LPalbq7icmeeLC8/+pqsOS/HmS3yZ5Q3e/cy0KCAAAAACwklWFmyvp7vcmee9abQ8AAAAAYGtW1Sy9ql5RVTddaJYOAAAAALBLrbbPzT9O8qok36uq/6yqG1TVvmtYLgAAAACArVptuHlwxgBCL0pyrSSvT/L
dqnpOVV27qvZeo/IBAAAAAKxoVeFmD+/p7nsmOWdGwPnKJDdMclSSY6vqmWtXTAAAAACAU1ptzc3f6e6Tuvvt3f03Sc6R5G+S7JfkLju7bQAAAACALVmT0dKr6hxJbpHklkmuNE3+wFpsGwAAAABgJasON6vqbElunhFo/nFGLdAPJ3lAkld097fWpIQAAAAAACtYVbhZVW9PctUkeyf5ZJIHJ3l5dx+zZiUDAAAAANiK1dbcPFuSR2QEml9cw/IAAAAAAGyXVYWb3X3xtS4IAAAAAMCO2OnR0gEAAAAANoJwEwAAAACYJeEmAAAAADBLwk0AAAAAYJaEmwAAAADALK1qtPQlVbV/ksskOVuS93f3D9akVAAAAAAA27DqmptVde8k30nyviSvTnKJafpZquoHVXWntSkiAAAAAMDvW1W4WVV3TPKUJEcluXOSWpo31d58R5JbrUH5AAAAAABWtNqam/dP8rruvk2S/15h/seSXHTVpQIAAAAA2IbVhpvnT/Kmrcz/UZIzr3LbAAAAAADbtNpw8ydJzrKV+RdJcuwqtw0AAAAAsE2rDTffmOSuVXXg8hlVddEkd0ny+p0oFwAAAADAVq023PynJHsn+XSSRyfpJLevqhcl+WiS7yV55JqUEAAAAABgBasKN7v720kumzFa+i0zRkv/qyQ3TPLSJFeaRk0HAAAAAFgX+6x2xe7+XpK/TvLXVXXWjKD0+9190loVDgAAAABgS1Ydbi7q7u+vxXYAAAAAALbXqpqlV9Wjq+qTW5n/iap62KpLBQAAAACwDasdUOjmSd60lflvzOiLEwAAAABgXaw23DxPki9vZf5Xk5x3ldsGAAAAANim1YabP8vWw8vzJfnVKrcNAAAAALBNqw0335Xkb6rqXMtnVNW5k9w1yTt3olwAAAAAAFu12tHSH5Lkw0k+U1XPSfKZafrFktwpSU3LAAAAAACsi1WFm939haq6SpJ/TfJ3y2a/J8m9u/tzO1s4AAAAAIAtWW3NzXT3p5L8aVWdJckfTJO/0t0/WJOSAQAAAABsxarDzSVTmCnQBAAAAAB2qVWHm1W1d5LrZNTaPGNGP5uLursftRNlAwAAAADYolWFm1V1uSSvSnJwfj/UXNJJhJsAAAAAwLrYa5XrPT3JqZPcJMmZunuvFR57r1kpAQAAAACWWW2z9EskeXB3//daFgYA2LpDHvSG3ugybFbHHHH9LbUmAQAAdlOrrbn5zWy5OToAAAAAwLpbbbj5z0nuUlVnWMvCrEZV3bOqjqmqX1XV0VV1hW0sf4uq+vy0/P9V1fWWza+qemRVfaeqfllVb6uqP1rfdwEAAAAA7KjVNks/fZKfJflSVb0syTeS/HbZMt3dT96Zwm1LVd0yyZOS3C3J0Unum+TNVXXB7v7eCstfOclLkxye5H+S3CbJa6vqMt396Wmxf0hy7yS3T/LVjEGR3lxVF+nuX63n+wEAAAAAtt9qw80nLvz+t1tYppOsa7iZ5H5Jnt3dz0uSqrpbkusnuVOSI1ZY/j5JjuruJ0zPH1JV18p4D3erqsoISB/d3a+btnm7JN/NGDzpZev3VgAAAACAHbHacPN8a1qKVaiq/ZJcNsnjlqZ190lV9bYkh25htUMzanouenNGcJmM93X2JG9b2OZxVXX0tO6K4WZV7Z9k/4VJp9/uNwIAAAAArEp1z3PQ1ao6Z5JvJblyd39wYfrjk/xpd19xhXVOTHL77n7pwrR7JHlYdx80NVt/f5Jzdvd3FpZ5RUYz+1tuoSwPT/KwFWYd0N3Hr+oNbnJG612ZkXoB5s3325atxXec/btlziGA9eYYvLK1Ov7avyuzf9fX7n7+MI31c1y2ka+tdkChpRc5V1XduqruU1UHT9P2rqozVdXeO7PtmXlckgMWHgdvbHEAAAAAYPe3qnBzGlH8SRkD7rw4o6n3BabZp0tyTJJ7rUUBt+IHGYMYHbRs+kFJjt3COsduY/ljF6Zt7zbT3Sd09/F
LjyQ/3UbZAQAAAICdtNqam3+fMTjPE5NcK8nvqsF293FJXp3kZjtduq3o7hOTfCzJNZamVdVe0/MPbmG1Dy4uP7nWwvJfzQgxF7d5hiRX3Mo2AQAAAIANsNoBhe6S5IXd/Y9VdeYV5n8qyZ+tvljb7UlJXlBVH03y4YyRzk+bZGn09Bcm+VZ3Hz4t/9Qk766q+yd5Q5JbJblckrsmo1PNqnpKkn+qqi9mhJ2PSvLtJK/dBe8HAAAAANhOqw03z53kA1uZ//MkZ1jltrdbd7+8qs6a5JEZo5x/Msl1u/u70yLnSXLSwvIfqKrbJHl0kscm+WKSm3T3pxc2+/iMgPRZSQ5M8r5pm79a33cDAAAAAOyI1Yab38sIOLfkskm+vspt75DuPjLJkVuYd9gK016Z5JVb2V4neej0AAAAAAA2qdX2ufnqJHerqj9YmNZJUlXXTnKHbCVABAAAAADYWasNNx+W5DsZzcBfmBFsPrCq3pfkTRl9bj52LQoIAAAAALCSVYWb04joV8ron/JcSX6V5E8z+qh8RJKrdPcv1qiMAAAAAAC/Z7V9bqa7f5kxMM+j1644AAAAAADbZ7XN0gEAAAAANtSqam5W1XO3Y7Hu7juvZvsAAAAAANuy2mbpV880OvqCvZOcY/r5/SQ/34lyAQAAAABs1arCze4+ZKXpVbVvkr9Jct8k11p1qQAAAAAAtmFN+9zs7l9395FJ3pLkyLXcNgAAAADAovUaUOh/k1x1nbYNAAAAALBu4ea1kvxinbYNAAAAALDq0dIfuoVZB2bU2LxMkiNWWSYAAAAAgG1a7WjpD9/C9B8n+XKSuyV59iq3DQAAAACwTasdLX29mrMDAAAAAGwXISUAAAAAMEvbVXOzqs6zmo1399dXsx4AAAAAwLZsb7P0Y5L0Kra/9yrWAQAAAADYpu0NN++4rqUAAAAAANhB2xtu/jjJR7v72+tZGAAAAACA7bW9Awq9JslhS0+q6itVdaN1KREAAAAAwHbY3nDzp0kOXHh+SJLTrXVhAAAAAAC21/Y2S/9wkgdX1UFJjpumXa+qzr6Vdbq7n7xTpQMAAAAA2ILtDTfvkeSFSR4yPe8kt5keW9JJhJsAAAAAwLrYrnCzu7+U5MpVdaokZ0tyTJL7JnndupUMAAAAAGArtrfmZpKku3+V5OtV9Ygk7+jur61PsQAAAAAAtm6Hws0l3f2ItS4IAAAAAMCOWFW4mSRVdeEkd0zyB0nOmKSWLdLdfY2dKBsAAAAAwBatKtysqr9K8rwkv07yhSQ/XmmxnSgXAAAAAMBWrbbm5sOTfCLJn3X3D9auOAAAAAAA22evVa53ziTPFWwCAAAAABtlteHmpzICTgAAAACADbHacPN+Se5cVVdey8IAAAAAAGyv1fa5+cAkxyV5b1V9NsnXk/x22TLd3TfemcIBAAAAAGzJasPNSyTpjFDzdEkussIyvdpCAQAAAABsy6rCze4+ZI3LAQAAAACwQ1bb5yYAAAAAwIbarpqbVXWeJOnury8+35al5QEAAAAA1tr2Nks/JklX1am7+8Sl59ux3t6rLBcAAAAAwFZtb7h5p4ww89fLngMAAAAAbIjtCje7+/lbew4AAAAAsKsZUAgAAAAAmCXhJgAAAAAwS8JNAAAAAGCWhJsAAAAAwCwJNwEAAACAWRJuAgAAAACzJNwEAAAAAGZJuAkAAAAAzNI+G10A5umYI65fG10GAAAAAPZsam4CAAAAALMk3AQAAAAAZkm4CQAAAADMknATAAAAAJgl4SYAAAAAMEvCTQAAAABgloSbAAAAAMAsCTcBAAAAgFkSbgIAAAAAsyTcBAAAAABmSbgJAAAAAMyScBMAAAAAmCXhJgAAAAAwS7MNN6vqTFX14qo6vqp+UlXPqarTbWOdU1XVv1XVD6vqZ1X1qqo6aNkyvcLjVuv7bgAAAACAHTXbcDPJi5NcNMm1ktwgyVWTPGsb6zw5yQ2T3CLJnyY5Z5JXr7DcHZO
cY+Hx2jUpMQAAAACwZvbZ6AKsRlVdOMl1k1y+uz86TbtXkjdW1QO6+9srrHNAkjsnuU13v2Oadsckn6uqK3X3hxYW/0l3H7sD5dk/yf4Lk06/w28KAAAAANghc625eWhGAPnRhWlvS3JSkituYZ3LJtl3Wi5J0t2fT/L1aXuL/q2qflBVH66qO1VVbaM8hyc5buHxze1+JwAAAADAqsw13Dx7ku8tTuju3yT50TRvS+uc2N0/WTb9u8vWeWiSv8ho7v6qJE9Pcq9tlOdxSQ5YeBy8zXcAAAAAAOyUTdUsvaqOSPLAbSx24fUsQ3c/auHpJ6rqtEn+PsnTtrLOCUlOWHq+7YqeAAAAAMDO2lThZpJ/SfL8bSzzlSTHJjnb4sSq2ifJmaZ5Kzk2yX5VdeCy2psHbWWdJDk6yUOqav8pxAQAAAAANoFNFW529/eTfH9by1XVB5McWFWX7e6PTZOvntHM/ugtrPaxJL9Oco2M5uapqgsmOU+SD27l5S6V5MeCTQAAAADYXDZVuLm9uvtzVXVUkmdX1d0yBgo6MsnLlkZKr6pzJXl7ktt194e7+7iqek6SJ1XVj5Icn+Rfk3xwaaT0qrphRk3ODyX5VUa/m/+Y5Im79h0CAAAAANsyy3BzctuMQPPtGaOkvyrJvRfm75vkgklOszDt7xaW3T/Jm5PcY2H+r5PcM8mTk1SSLyW5X5Jnr8s7AAAAAABWbbbhZnf/KMlttjL/mIyAcnHarzLCy3tuYZ2jkhy1dqUEAAAAANbLXhtdAAAAAACA1RBuAgAAAACzNNtm6QAAa+2YI65f214KAADYLNTcBAAAAABmSbgJAAAAAMyScBMAAAAAmCXhJgAAAAAwS8JNAAAAAGCWhJsAAAAAwCwJNwEAAACAWRJuAgAAAACzJNwEAAAAAGZJuAkAAAAAzJJwEwAAAACYJeEmAAAAADBLwk0AAAAAYJaEmwAAAADALAk3AQAAAIBZEm4CAAAAALMk3AQAAAAAZkm4CQAAAADMknATAAAAAJgl4SYAAAAAMEvCTQAAAABgloSbAAAAAMAsCTcBAAAAgFkSbgIAAAAAsyTcBAAAAABmSbgJAAAAAMyScBMAAAAAmCXhJgAAAAAwS8JNAAAAAGCWhJsAAAAAwCwJNwEAAACAWRJuAgAAAACzJNwEAAAAAGZJuAkAAAAAzJJwEwAAAACYJeEmAAAAADBLwk0AAAAAYJaEmwAAAADALAk3AQAAAIBZEm4CAAAAALMk3AQAAAAAZmmfjS4AAAAAsL6OOeL6tdFlAFgPam4CAAAAALMk3AQAAAAAZkm4CQAAAADMknATAAAAAJgl4SYAAAAAMEvCTQAAAABgloSbAAAAAMAsCTcBAAAAgFkSbgIAAAAAsyTcBAAAAABmSbgJAAAAAMyScBMAAAAAmCXhJgAAAAAwS8JNAAAAAGCWhJsAAAAAwCwJNwEAAACAWRJuAgAAAACzNNtws6rOVFUvrqrjq+onVfWcqjrdNta5a1W9a1qnq+rAtdguAAAAALDrzTbcTPLiJBdNcq0kN0hy1STP2sY6p0lyVJLHrvF2AQAAAIBdbJ+NLsBqVNWFk1w3yeW7+6PTtHsleWNVPaC7v73Set39lGnZw9Zyu1W1f5L9FyadfhVvCwAAAADYAXOtuXlokp8sBZCTtyU5KckVN2C7hyc5buHxzZ0oAwAAAACwHeYabp49yfcWJ3T3b5L8aJq3q7f7uCQHLDwO3okyAAAAAADbYVOFm1V1xDTQz9YeF9roci7X3Sd09/FLjyQ/3egyAQAAAMDubrP1ufkvSZ6/jWW+kuTYJGdbnFhV+yQ50zRvtdZruwAAAADAGttU4WZ3fz/J97e1XFV9MMmBVXXZ7v7YNPnqGTVRj96JIqzXdgEAAACANbapmqVvr+7+XJKjkjy7qq5QVX+c5MgkL1sa0byqzlVVn6+qKyytV1Vnr6pLJTn/NOniVXWpqjrT9m4XAAAAANgcZhluTm6
b5PNJ3p7kjUnel+SuC/P3TXLBJKdZmHa3JJ9I8uzp+Xum5zfage0CAAAAAJvApmqWviO6+0dJbrOV+cckqWXTHp7k4TuzXQAAAABgc5hzzU0AAAAAYA8m3AQAAAAAZkm4CQAAAADMknATAAAAAJgl4SYAAAAAMEvCTQAAAABgloSbAAAAAMAsCTcBAAAAgFkSbgIAAAAAsyTcBAAAAABmSbgJAAAAAMyScBMAAAAAmCXhJgAAAAAwS8JNAAAAAGCWhJsAAAAAwCwJNwEAAACAWRJuAgAAAACzJNwEAAAAAGZJuAkAAAAAzJJwEwAAAACYJeEmAAAAADBLwk0AAAAAYJaEmwAAAADALAk3AQAAAIBZEm4CAAAAALMk3AQAAAAAZkm4CQAAAADMknATAAAAAJgl4SYAAAAAMEvCTQAAAABgloSbAAAAAMAsCTcBAAAAgFkSbgIAAAAAsyTcBAAAAABmSbgJAAAAAMyScBMAAAAAmCXhJgAAAAAwS/tsdAEAANgzHHPE9WujywAAwO5FzU0AAAAAYJaEmwAAAADALAk3AQAAAIBZEm4CAAAAALMk3AQAAAAAZkm4CQAAAADMknATAAAAAJgl4SYAAAAAMEvCTQAAAABgloSbAAAAAMAsCTcBAAAAgFkSbgIAAAAAsyTcBAAAAABmSbgJAAAAAMyScBMAAAAAmCXhJgAAAAAwS8JNAAAAAGCWhJsAAAAAwCwJNwEAAACAWRJuAgAAAACzJNwEAAAAAGZJuAkAAAAAzJJwEwAAAACYJeEmAAAAADBLsw03q+pMVfXiqjq+qn5SVc+pqtNtY527VtW7pnW6qg5cYZljpnmLjwet2xsBAAAAAFZltuFmkhcnuWiSayW5QZKrJnnWNtY5TZKjkjx2G8s9NMk5Fh7/ulMlBQAAAADW3D4bXYDVqKoLJ7lukst390enafdK8saqekB3f3ul9br7KdOyh23jJX7a3cfuQHn2T7L/wqTTb++6AAAAAMDqzLXm5qFJfrIUbE7eluSkJFdcg+0/qKp+WFWfqKq/r6pthcCHJzlu4fHNNSgDAAAAALAVs6y5meTsSb63OKG7f1NVP5rm7YynJfl4kh8luXKSx2U0Tb/fVtZ5XJInLTw/fQScAAAAALCuNlW4WVVHJHngNha78HqWobsXQ8pPVdWJSf69qg7v7hO2sM4JSX43r6rWs4gAAAAAQDZZuJnkX5I8fxvLfCXJsUnOtjhxajp+pmneWjo6Yz8dkuQLa7xtAAAAAGCVNlW42d3fT/L9bS1XVR9McmBVXba7PzZNvnpGH6JHr3GxLpXRl+f3trEcAAAAALALbapwc3t19+eq6qgkz66quyXZN8mRSV62NFJ6VZ0ryduT3K67PzxNO3tGn5znnzZ18ar6aZKvd/ePqurQjAGJ3pnkpxkDFz05yYu6+8e77h0CAAAAANsy19HSk+S2ST6fEWC+Mcn7ktx1Yf6+SS6Y5DQL0+6W5BNJnj09f8/0/EbT8xOS3CrJu5N8JsmDM8LNxe0CAAAAAJvALGtuJkl3/yjJbbYy/5gktWzaw5M8fCvrfDzJldakgAAAAADAuppzzU0AAAAAYA8m3AQAAAAAZkm4CQAAAADMknATAAAAAJgl4SYAAAAAMEvCTQAAAABgloSbAAAAAMAsCTcBAAAAgFkSbgIAAAAAsyTcBAAAAABmaZ+NLgAAAADAnB1zxPVro8sAeyo1NwEAAACAWRJuAgAAAACzJNwEAAAAAGZJuAkAAAAAzJJwEwAAAACYJeEmAAAAADBLwk0AAAAAYJaEmwAAAADALAk3AQAAAIBZEm4CAAAAALMk3AQAAAAAZkm4CQAAAADMknATAAAAAJgl4SYAAAAAMEvCTQAAAABgloSbAAAAAMAsCTcBAAAAgFkSbgIAAAAAsyTcBAAAAABmSbgJAAAAAMxSdfdGl2G3U1VnSHJckgO6+/iNLg8AAAAAzMn25mtqbgIAAAAAsyTcBAAAAAB
mSbgJAAAAAMyScBMAAAAAmCXhJgAAAAAwS8JNAAAAAGCWhJsAAAAAwCwJNwEAAACAWRJuAgAAAACzJNwEAAAAAGZJuAkAAAAAzJJwEwAAAACYJeEmAAAAADBLwk0AAAAAYJaEmwAAAADALAk3AQAAAIBZEm4CAAAAALMk3AQAAAAAZkm4CQAAAADMknATAAAAAJgl4SYAAAAAMEvCTQAAAABgloSbAAAAAMAsCTcBAAAAgFkSbgIAAAAAsyTcBAAAAABmaZ+NLsBu7vRVtdFlAAAAAIC5Of32LCTcXB9LO/+bG1oKAAAAAJi30yc5fkszq7t3YVn2DDWqa54zyU83uix7gNNnhMgHx/5eD/bv+rJ/15f9u/7s4/Vl/64v+3d92b/ry/5dX/bv+rJ/15f9u77s313v9Em+3VsJMNXcXAfTDv/WRpdjT7DQ7P+n3b3FFJ/VsX/Xl/27vuzf9Wcfry/7d33Zv+vL/l1f9u/6sn/Xl/27vuzf9WX/boht7mcDCgEAAAAAsyTcBAAAAABmSbjJ3J2Q5BHTT9ae/bu+7N/1Zf+uP/t4fdm/68v+XV/27/qyf9eX/bu+7N/1Zf+uL/t3EzKgEAAAAAAwS2puAgAAAACzJNwEAAAAAGZJuAkAAAAAzJJwEwAAAACYJeEmAAAAADBLwk0AAAAAYJaEm+y2quo+VXWljS7HnqyqaqPLAAAAm1VVuSYH2EnV3RtdBlhzVXWxJO9P8o0kr0nyjO7+9saWas9RVWfr7u9Nv++d5KR2sNmqqqru7qWfG10eALbNMRvYUVW1V3efNFUCOOvSOTMAq+cuEbul7v50krskOSbJHZO8sqruUlX7bGjB9hwfrar3V9Ufdvdvp9Bu740u1GazeKd+ebDpLj6w1hxXdt5ii4Sq2k+wuXvQ0mTHOKfbOd190vTrw5P8Z1WdJnGMhh2x9P9SVXtV1eWr6ryO5Xs2B1B2W939iiR/keTxSfZN8tAkr6qq625owXZzVXWmJP+Z5GxJvlhVT6uqfbr7t9N8x53J0sltVf1VVT0+yZFV9ZfL5vmSnoFlgYfP+Cos7UP7b/0sHFfuUFW3q6pTbXSZZmjpc3qLJM+oqmucYqZj9qa3cEG8X1WdNxk3GDe2VJvbwj47S5IsndOx0yrJtZLcPzlF6Alsv39M8ookN9vMx3LnB+tPs3R2W0tNPqbfD0lyzyQ3zAg635rkyKmGJ2usqvZNcoUkt01y64yTt/t193On+ZXsuRcTU42Hvbv7xKq6e5InJTkxyc+SHJjkm0nu291vWlhe0/5NaKE7gaUmZjdKcuMk50zyziTvSvKp7v7VRpZzs6qqvRdufJwu47Dw8w0u1m5pusn0m6q6QZLnJPlCkmuv9NnU1HplS5/XqvqDjK5v3pvk8O7+8gYXjR2wcNx+aJJrJPn37n7J8vkbV8LNZeH77XRJXpbk9Elu0t0/3uCi7Raq6tFJ7pfk8CRHJumM78Ld+jO4cDw9IMllkvxxktMmeV2Sr3X3dza0gGxqC5+fCyf5SJJnJHlqd39zmr9vktN093EbULbFlnj7JzmTz/OuIdxkt7Z0p3kh5LxqkrsnuXKSn2Tc5Xlmd/9wo8q4u1l2QD9jkusn+YckF8v48rl/d79vmv+7AHpPUFV3S/K27v7S9Px0Sf5fkqOSPCQjeL9OktslOTQjhL/H0oWzC67NaeHC77JJPpDkp0m+kuTSSY5N8oIk/9Xdn9y4Um5uVfUvSQ7LCIXfnLHPPunieW0sOy5/JeNz+sju/n/TtAOSnCMjtPhkd/96wwo7A1X1xiQHZNyE+sg07dRJbpbku0ne392/2NO+4+Zg4YL4sIwQ5d+TPGYjLoDnYuE77jlJrpTk+d39hGXLOD/ZQQv79bwZN5wunlHz7H0bXLRdqqpemuS6SU5KckKSs2f8Xz64u3+0kWVj86uq1yQ5a5K/7u7PT2HixZI8Icn+Sf43yWOXQs9dUJ6l/+v9k9whyV8nOWOSn2d
8rl/mc71+NP1itzWdaJ20eGHR3e/p7lsneXCSH2QEna+oqjtuVDl3N1NtiKW+TW+U0efp3km+nOQiSd5TVa+uqnMshM67fd9NVXVwkqcm+XRVPWbaR7/NCMFe2d3f6u5juvvfM74I/ynJBZJ8oaqeMV2QuXDYJKrqulV1t6o658Ix5lEZNbn+rLuvkPH3+0BGc5nnV9U9lppAcopmjndP8ncZtZffmeTqSd6Q5IlVdaWlvsjYeVV1pySnSvLchWDzGknekeSzGWHP3TWd2rKqunRGDaMXJvn0NO1qGfvuBRnh/Guq6gDB5uaz0Jz6SRk3EJ/V3cdV1b5VdZHp+/YJVXXRDSzmprFwoX7JJLfJqB31b9O8/arqVlX1tCT3qao/3MiyzlAnSXd/LeN8+bNJ/mupq4vduYuWpfP+qrpPkhskeWzGOdM1MwLOvTPOkWGLpmurCyT5cHd/fpp8lySvTHL+jM/RnTJaEu5qT0nyuIxQ8/XTzyOTXHXxHGt3/j/fCHYmu52Fg8Spq+pyVfXPVfWQGv2LXSZJuvtFSW6e5F8zal88s6quvUFF3q1MJ8K/qaqLJ3l2kqMzwp4/SvJnGXfSrp7km1V1eLLH9N30vSR3TvKmJH+fEfbeLuM4fEKS1NT/XXd/LmM//VWSpyf5m2lZNo+HZfxtnlhV16uqs2f8jd+/VJOru7/a3bfMCEJOyjipecZ0MbjbB/rb0iePFHuzjH1zvYzP/LWSPDHJTZO8Jsn9q+oCZUC4VVu4MXLOJL9J8p0kqao/zwh5Tpex7z+b5IgkF96AYm5ay/5fT58RSnytu39ZVVfKuHF1gYxaGn+T0dT5Tru6nKxs+cVjVR2a5LwZ38dLXQrcLsl/ZwR490jyzqq68q4s52a0ENDfPmOQzndPtZIPSvKYJC9OcsuM48g/uVDfsoVA7xzJKbtm6u5fZDRLPyHJ4VV14O58c2SqPb1vxvHyBRm1gX+Y8b3/8yRPX6pNXVX/UlVn27jSsol9O8kvk5y/qg6pqtskeWSSzyS5VHdfKeM69Dq1C/oYX7gZdImMa77HJrlad98vo1LVRzK6quqp9Z5+dteYLyB2OwsHiccmeWNGjaDDkzw/yQur6sFTbasfd/c/Z5zQPqK737IhBd7NLOz/e2c0z3tRd399mve+JI+Y5lWSx1TViVV1/Q0p7C7U3SdOofrdMzqO/36Sf0lyuYzmOOnuX1XVPjX6xvv1tL8eluSa3f28jSo7K7pORg3w62fU4LpHxiBav+u/cKoJtFd3f7C7L5MRdlw34++5JwT6WzUFm3tnBG2/nI7Jv+3uL2TUgr12krdn1Hx9fZJbbVhhZ6aqzlZVZ11h1teTHJzkelX1t0lelHFx8Jfd/eKMz/SvM0LQPVpVnaGqrp787kJ86Zz5qxnd2jy+qh6c5FVJjkty1+7+z4zagF/OOB6wCUwXm3sv1Jb5VZJTT/O6qm6X5NFJvpbR9981MrqJudZGlHeT+mGSs2R0tZIkT844Jj85ybkz+oy8fUYLHVYwHUdOneT9VfX5qnpWVT2+qq5To9/A/8uoxXiFJC9aCkF3Y2fNdA7Q3d+vqnNlXK89Lcnnkt/VlL9zxv8k/M5SC82Mm1I3yGh+/qIk/5PkQd39o6o6fcZxfd+Mz9q6WrgGvkXGecBbpu+YP5nK+MSM87AkeVBVvULrpLUl3GS3snBX9IYZIdILM/oHOiSjRsW+GeHag6rqDEnS3Z/t7sduRHl3Jys0Y/xBRg2Xpf4il4KeX3T3CzNOhD+V0Wz3J7uyrBuhJt19bHf/a8bJ2tMzmqU/oKpeUlUHdfdvppqv+03L/7i737GxpWe57j6+ux+X5FIZTaj/KSO4vOdSbZ8poD6pqvabnj8/4+TqPsme2xSlTu4Lubv7N0nekuT4xWWmfffRJHfLqNnZGYNtsX3+J8mbq+paiyfO07H3uRk1wx+fse/v3d0fmb4/z5cRbv5mA8q82dw/yduq6vlV9YcLFy3
HZvy//2Za5qtJ7rJwg/SgJPtkfAeygarqglX1xKo63XTjZKmm3JcyBnt7WlW9J6O/w7ckuU+PPq6/meSLSc66wrnNnuqjGcfgN1fVRzKOy09K8sDuPjHjJsn3Mz7/LLPwfX/ujO5X3pcR2N0843j9/oxA76kZ+/J6GQOhLq2/O34Of5TkNEnOPD1/TMb/3n929wnTtEtmHEsdTzmFpeN5dz8i43/phRnXVnfo7s9Miy0NVPWe7v75Ljzv/mGSM3T3p6bnT8u4VnjbdI136oxsYt+M8wXWiAGF2C1V1dszLobv0N3fnEKing5qT8uoZXW/7n5KVe3bBk9Ytam/k+8u34dVdYskL8+4CPyXpROVqtqvxyjh98xo/nXHpb7f9gRLn8WF51fPqD385xmf2cd392MW5v9uNGk2r+mu7IMyLkg+mNEn2du7+7vT/L2S7DN99g0ykmQ6Btwk4wbUaTOap726VxjgrarO1FMH7Mv/hzilqfn+X2SE6JfOaDb6lCSf7+4TpovkC2XUznzfwrH5whkX1gd09xU3ouybSVVdJaNm2nUz+il9TsagBL+a5l8k44L7F939s2na+TKOAzdPco4p9GGDVNU/ZdzQ/kzGKLrPWZh3lYw+wQ+e5v9Td/98mnftjBpAD+zu5+3Jx5zF915VN8togXDGjGDz9dN32gFJ7ptxDL9YGyzjFOrkQawunlF762lT0FI5uabr1TOC4QtltOg5KSP8+MfuPmIDir2uFq7LHpjkb5P8R8b1wl9298unZc6b8Z10cHdfbuNKy2Y0HXdOneTEhfPD351fV9UVMm5AHprkfNP/4C45llfVTTIGLf6zJH+UcQ529SRHT+W4dEaL0rd19/3Xuzx7EuEmu52qOjCjivrPu/u607RKstd0QNkvyXuSnKq7L7VhBd0NTE1m3p/kP5Mc0d2/XJh3YEZzvSsk+eeM0aI/P81bOhG+dZLLLV0Y7s6Wndz+YUafK1+Z5p0uyY0zQs5rZNR2fUh3v2LDCswOm0Klpf5+DsoI9/8zyYeWLpr3dAv/B3+WUVvlK0k+ljFS+mky9tdLk3x0KURidWr0A3vrJA/IGDH0qRkn099cfnJfVefO6Pf00CQ37O6j3VhJaox2ev2MkPOPM2oaPaa7X7awzL7d/etp2edmDIhxeHc/dyPKzMlqdM1wk4z/g0tlNFt8ZHe/c5p/+u7+6bJ1rpTkoRkXw3tk37NbCgDq5P7k9plq3S9Nv3VG2Pns7n6oY8cwnQdfKiPQ+GVVfSwjjLlWkm+vcBxe+n48Q0YgcseM5q236+4379LCr4OFz89iYH6RJC9JcomMLmruleSojJZfD8908393eP/snIX/j3Nn1NC8e0YXI9/PqHn/4IXP1WmTfDzj3Oe+3f3a5cetdS7rARmDCx6YcT3wyoxWMr+azs3undGC8ZDuPlalh7Uj3GS3VFWvyrhIu0p3f3lqbndS8rv+lZ6RcQflWj31B8mOmQLjgzI6aj66u/9iurg7Y3cfOy1zSMbF9FUzarO9O8kncvLF4lO6+0G7+0F94YTugIzmSN/L+LL9/LKTvD/IqPFzy4waV3/tAnnzWTjBOm9Gv5AvW7xArqozZgwadd+MLheek1HD5SMbUNxNqar+J8nPMvrT/GZG7an7ZdT8+UbGYGSv7TG4Fqs0fff9UUbzxjtnDAjyuCRvXKohO4Xyd09y14yRo/91T66ptmQxoKmqi2X0432tjIulo5I8urs/MM3fK+MG1b2SvLlHf95soGXfrRfICDhvluRcGTdWHrFwg3EpoD5PRtByzoxWJe/elRfEm8VCrbprZdQiPF+SV2ecw52weL5WVbfNqJX0me4+bHH9XV7wTabGwFXPzWiV8/qM77ibJnnTYsi37LO6WPPsoIxz5y8mufHucsNvarXxvu7+3+n5fhldpdwro//in2c0Vf9Jkn/v7odvTEnZjKrqrUkunjGuxtcybgBcJMl1e2H8jKr60yTp7nfv4vI
tXfNdIMnzMvKItyd5bZJvZYzmfvmMGtyPdjNobQk32a0snCj8ecZdkjdknKD+cGGZs2SciF06ycV351BtV6mqU093pZ+f5OwZ+/eDffJIh3fJaKp33oy+fo9P8oruvus0f7c+EV74ontBxhfaQ7v7vxbmnzvJj/rkJnFXzAiAH7Y775e5q6r/zqjN9bAe/agun3+hjAGhbpnkOd19l11cxE1lIRQ+c0aNjF91998vW+ayGf1uXTtjVMl/zxhF1XF6J9To3+nySR6Y0cz6LRmjoh891SQ4U8aI6d/u0R/Ubn3DaXsshVpVdaOM8PciSf4rI+y5QJL9MpqdPWLhht65k3x/dwkh5m75uUWN7kNum9F9yF5JnpXR6uTX0/w/zghB39rdr9uAIm+4heP0H2eMJr9vRv9x50zyoYwQ6q3d/bMao13fJsl5kryquz/rQv1kNUb4vmHGjaPLZwwkcrvufs821lsMOB+XcQ5xte7+2joXed1V1Y2TvCajYsRzM5rlfnWad9GMfbVPRrD5/CTHtK7D9ngL11E3z2jhc4c+ufuCT2eM4fB33f3dqeLBr7v72wvrr/t15lTp5/TdfXxVnWo6tzpLxmf63jl5kMGvZXTXduSuKtueRLjJbquq7p0x+mVNP1+f0a/bX2T07/J33f3vG1fC+Vu4+Fs6GX5Ixmi7P804KXlVkg8vnKQdltGs72dJvjats1ufCC8E7hfJCGv+IckLpguDc2d86d0so+nCEd39tI0rLduy8Fm/S8ZF3t27+6UL8w9O8tskv+nu70/TrpfkC1Mt8j06NKrRBcN/ZQRpH+juf5im75VpjKHp+c0y+uD69+5+0EaVd3cy7eMDMvqAOjyjRudzM/qh/oKT65VV1VczBr579FJN4qq6TkZN2Jtn1Dx+SkZLhD32f3szW6x9WVWnyqhle8skV8kYvOWIpeP4FNj9tpc1n93TVNX7k/wy40bUtzIqBNwnyZ9knNs9sbs/PC2r7/rJdCPp4O7+4sK0R2bcWDo+yRmS/GtG/+rfWzhH3C8jAP1id39vWu+MGbXE9+vuS+/q97JequqaSZ6c5KJJXpdxvfCe7v7xNH+xJuse+z/I76uq52Scu9y2u79RVXfL+P69WkYXUF1VR2Zcaz5qvY9LC9fBF8m4vrtekv+XkTm8Pcknpu+SvTK6Hftmxg3QH0zr79HXBOtBuMluZakG4fT7ARl9Ld0ho3bVPhmhw/EZd5jvukHF3K3V6CvoaRn95Hwho+bVUT31t7mnqqo7ZwxscLMe/dmdJ+ML+UYZTeTOluSCSW66rbv6bKzpJOUzGV0MPHy6QDlnRkh9REaNg//O6HfvxxtW0E1oqpX8wYVJf5Pkxd39i2n+YlPg02YM1tJOAHfc9Dn9oyRnzagZ/tmFeedO8lcZN/oOzOgm41kbUc7NrKqultFv1t9297OWhWSnTvLEjFqdP83oP/ZQtTY3r8UQbroRdfPpcdGM/sOfsKubMG4mC0Hb2TJqJf/XYu2ijKbCN8u4iD9zxg2oZ/QYYZ4kVfWijKDlVt393mnatTMGYTou47rkrzLOEx6V5D+mcORSGTdRHtfdj1rY3jWTfKm7j9l172J9TN9JWajw8LcZ3X10RsD56oybnoJyVlRVT8voUu7C042qr2cMmviPPVoQniOj5eZnk9xzV32WquojGd13vDtjAMI/yxg/4ZlJ/qe7v7CwrMB+HRl6nllbuGNymYw+HC9QVT/LqO79iSQvqKp3Z4w++IdJzpIRJO0xo3Ovp4VabGfK6Pz7uO7+SZI7VNVTkjw9o5P5G0x3296+dEd6D/TFjJDhmlX1o4wvvHNndDD99Bp9M709yfkzBrxi87pgRs3Dry98nh+RcdH3Pxmd4t97+vmIDSnhJtXdRyfZq6oenHFh95QkF6+ql3T3hxaCzb17YRAmweb2WaEWwe0W5r0to7bQ26YaD0/K+Lw+JOOzyu/7QZK9M1p9ZNq3e2UMUPjLqnpokj/
POGa/UbC5eSw0Y/yjjG5eLp/kO1X15YzQ7ptJnlJV78zos+2vkjyrqi60J154LgSbeyW5TJIfT4/Fi/EfVNWzkrw1o+by4RkDevzTBhV7M/qfnFwjOFV1wSTvXAjVP5FxrnenjHPk21fVmzL6pj8+o6XZ7/Z5d79t17+F9bEQau7b3b/u7iOr6tkZg93dK6Miyiuq6k3d/X8bWVY2rY8nuWVVnT/j5viJSZ7ZJw9oe+GMa6sX9+hHeVc0R796Rrdrd8iozPObqrpExiCNT8jJ18Bv7e7v7onfL7uSmpvM1rIaPp9Pco6MgSgOyOgw/jUZd3K+sMK67prspGXNRl6WcfH39z0GyVnsL+iWGQHnAUnellGbbY8bJGSq2ffMjFGh909yQpLbZ1wQn1BVl88YyOCpSzUl2Jxq9Bn5mYyaPo/LGDjoehknWP84LfOhjED7do41K5tqTh2ZUXv5SxlNpF+pFtDOm2oRnDXJCzIusi+QEfD8YUbz6b9fWHb/7j5hQwq6SUzH5wOTfL5POVjKaTLCigtm9NX43l7oRqVG314vyuh39x27tNBs0cKN1wsneXlGf6nfyDhP7CSfS/Kv3f28hXX+LOMG7QdqN+8uZ2umZp5Pn56+Ocmde6HvuoXl9ssY4fozU9DvvHqZGn1Ivz/jJt5/JfnYwnnz+TP6lr5dkitktHR6UHe/rnbDQayq6tJJvtzdx0/P90qy90Lo+6iMsHyv6KOcLZjOG9+TcdPx3BmtT541BYoXyKhQ8Cfdfe5p+XU5Li27Br52Rqutv+juLy1rIfDnGf//Z0nyjozr5D3uGnhX2mujCwA7oqrOVVWHT1/8S8HmfTP6sLltkktmXCj/Q8ad509U1WOnJuq/4wRsTeyV/K6z8ytlNCX5fDLuztYYpTfd/fLuPldGiHGtTHez9zTTxcGtM2o4/E2SP+7u10zB5lkyPrcHZVyIsYn1GKDs7zOC6qMzmpk9MMm/JL8LPH6aZH/HmqGqblJVX6qqyy1N6+5vdvdNklw94+77Y5M8p6ruunT8YPtNzUZTY/TiC2XUCn9Yj76lH5xRM+15Se5fVY9bWn5PDzYnr0nyviQXW5zYo7uEf88Iip+cUWPkPAuLXCXJpZKcZtcUk+2xEEw+O6PfyL/IaHp+7iT/nHGz9QlVdY+F/4M3dfcHlq2/W6uqu9ToC/13x4+MC/C/y+g+5DpJHjmFBqfQ3Sd290eXakz5rhuWvrum/fn/Mmq5/kPGsfdvp1Az3f2ljAGtlrpFuHFPg1jthsHmDZN8LMmDquoSU/hz0lSzbt9psddmHIPvnNEnKfyeqcb9jTOanf86o+XEHavqfkleluSaGX0DL7VkWdPj0vJz0xrje9wmY3yPr09l/PV08yfd/eruPk+Sx2fUzv7WWpaH36fmJrNSVf+QcXfkcxkjTr+qqu6ecQJ2+z55dO79k1w842LuTkm+m9H5+TM3puS7l4XmS2fPOHl7csb+/Wkt6xuvTtlH2Wm6+xe7413pRSvdKayq/br7xJXmV9XfZYRlz+zuR+7a0rIjFj77p8sIkM6X5Hs99dNWVftknOg8M6N/1TftybWAllTV7ZM8MiNceGmSe3X3j5Ytc9eM/fbS7r7tri/l7qGqnpDRTcufdvdXltUiOHdGDdlLJblMd39j40q6eUy19u6aUfPi11V1hYyBPZaa5R6W0XTyIhn94n0rIyA7LMnHu/sqG1BstqKqrpTRWuR+vaw/2aq6WMaIu2dLco3eA/sEr9HH48czAqXr9MnNOpf6O75YRnP9O2c0PX9ixmCIP9j1pZ236abe05NcLiPsfG6Sd/Q06ODubmrt8rgkf5kRAD0to9XSMQvLXC0jALp57wajwrO2quqQZZ+XS2dULLhZxvfyzzKOZ0/r7lfvojLtm+SNGQMFJaPvz4d191em+ZVk34Vrv/2nCi279TXwRhNuMitTjahrZ5xsXSFjNLJPJ7lEd99o4Q78UlXxA6flHpDkQtP
dE9bIVEPoSUnu2N1vXDZvqb+ryyY5Kcn/9h7Sb95CAPYnGSOyXjTjrvVHkrx5KYSflr1bxoneq7r71htSYLZbVR2w+PdbYf49M2rmfr27b7DrSra5TcfmS2XUoLpzRjD0qO5+9LLl9ktyqu4+fvmNErZPVT0g4yLx3N39rWna3hlfjSdNQd4bklypp9GOOVlV3SxjQIInZ9QE+XSfPFDBLZLcJaOJ2c8yBsB4Ru8Gg33sbmoMxPLajPOTV07HoMr4P+gpcPpwRlPgx29gUTdMVf1FRtPgl9bou/7ySV62UFHgTBmB3F0ygoRPZfQJ+eapVjMLljVVPV13/2zZ/L/MaOFxQEZ3Fi9KcvRisLw7WeFG/qUyQs7rZNQQ/veMGsInZtRuvWOSC0ytY9iD1cn9h183yf0yAswTMm6Av6S7vzWdL542o2XF3kmO6ZMHFV7T5ugL4fvduvtjC9P3yuiW6qEZx8rXJnlOkvf3GINi6fyrBJq7hnCTWaqqiye5aUatzHNndML9V939P9P8U9SUmmqrVHd/fSPKu7uaLh7ekuSG3f2GxdqJ0/x9My4Oj0ny4N4DBltY+EK+UsaF735JPpnk0hlNld+d0dH1WxbWuX6STy4FEWwedXLfbZfJqJF5+SSnzrgo+Y/FC7yqulCSFyb5UZK79Bi0Ra3NZbW3k/xJRj9jt07ytST36O6jFpbXd9tOqDGY0NEZfb09oLs/vWz+bTMuKm/a3W/dgCJuatNN0YdnhPDfy2gi+bru/urCMock+WF3/3QDish2qKrzZdxQfFuSu/bJff0t3Xxc6rvt9Un+bk8/5lTVURnNJl+W5BXLjsnnTvKnmbrUSfKQ7n7MhhR0E1s4X7hjRl/c/9LdL1xeU6uq/jmj6f/xGTXCH7M73chbqNywf0Y/zxdM8s3u/sg0/6YZIfmFM/oaPTDJmTL2g9ZLe7iFY/TeGV2Z/STJhzIG8rx2ku9nBI0vy+gned2P3VMLu3/O+K74t8UWMdP80yW5R5IHTZNeMJXvE4vXxaw/4Sazsuyu6P4ZF8k3zaiWfqokD+/upy4tmzGi6R4dLKynGqPBvTejecmtF6bvOzXt+6Mkz8/ocP6uG1TMDVFVn8wYafcR3f3eGgMrvTQj4PxpRvOFV3b3RzeulGzNwgn6aTIC6gMyaor/ImNwlk8mOaK7XzEtf2DGSfyx3f21Pbnm4Qo1NvZN8ts+eaCxgzKO20uDZ3004wbV7w0Ax/abahHsk1E75u+SvCujFsH7ps/k5TJGR79Id//RhhV0BmoMRvOYjBprH8gIId7fKwyuwuYynf/tlxFM/3VGX9f/0gv9y05dDbwyyaO7+6l7+k2VqjpbxoAut824Qff6jC5CPjHN3ydjYLKbJnnRdDzZo/fZooVg81wZzWPfleRx3f3JLSx/zozw403d/bjdYV8unDMt/XxSRiWUU2X8P7474zptqRuf22acB/wgo3XXv21U2dl8anRV9A8Z54YfrDF+xoWS3D/jc/P+jO/o9y+vJb1O5blyT/0yTzeD3rdC66PzZJxj3SnJ5zPGUXiOyiu7jnCTWVoWch6U5GoZ/Wv+WUb48A9Ld5335IBhV6iqh2TUcnl9kicsHPhPnzHIygOTXLK7P7u7/y0W7jbeIKPpxD176iC+qj6dEeA8e3qcN1Mz9Yz9prnCJrNwgv6MJNdN8rdTDeWLZ9xF/mGSgzNGU37EYlOVPdUUri0NKnbxjAvlhy/V2l7hbvc/TMucI2MAnJdtQLF3S1X11xkn/mfOqB1zXEZNmROT/GV3v3V5jSJ+X41m/I/N6IPwFRmDgHxiqSYgm8uy88P9Mvo6vFOS/83oauDbGcfteyQ5R6/zqLpzsKx2/WUyzumuknFx/sqMG7HfmObv190n7sn7ayUL538vz6iteOfu/t8paD8go3usnyf5vyRvW96Sae77s6rO1N0/WtgPV87oz/XZGUHvuTO6pblMRqj7kO7+8oYVmE1psbVTjZH
G75Pk+ovh5XR9ec2MmzEXy+jH9s69Dv0BT/+/v+s3c5p2wYyamefLaH30mKVrvYVlrpwxRsifJDlPj4GQ2AWMls4sLZy47tvd350uiO+b5N4Z/Tu+sapeVVUX3Z3DtI1UJ48Y9/zpcY0kr6iq11XVo5K8PcndMjp33u2DzeQUo4VeNOMk9svJ70KG82X0zfb+JPdK8tskl0xyVuHC5jQFdOfO6B/qORm1lJNxJ/lrGc2qj0xygyTvq6qXV9WpN6SwG2xqkpMeI6Au/Z/fKmOgrA9W1V9N839dw6mmZX6cURP2OoLNHTOddC/9fqmqunNVXbuqrjgdb/8joybxIzP284kZtQh+1xzdsedkVXXqqrpkVd2kqi5WVaeZ9uObMvru/ruMprlvzxh8iE1i6abKyU/HCMzTBenDMi6Qf5NxrvKWjAFdTkhyh2mFNR9Vd04WjwPd/fHuvlFGP5v7JfnHJE+rqtvVGBTyxGm5PXZ/rWQK9P4gI8x4TcbAp0lyo4xBR/4xo0bX0zPO/ZKcfByf8/6cznF/UKPp7tL30o0zQs3HdvdLkzwlo2ufB2Xso/+tqkdU1Rl2fYnZbBZujC8Fm8/IOD7vl9Ea5Xe6+6fd/ZqM/61HJjnjegSbk8cnOXyqab30+l/IOL9d6qv5eVX16umG/tIyH0hy9SSX7+5vLvuOYh2puclsLNSiOl9Gk9DLJ9k3owbVkdO8mqbfKMk9Mw6IZ11+h5Qdt6w2xP4ZIXL65FF4b53k9knOnzHYwjeTPCPJs6ZAY7cPN5dU1R8nuV53P3h6/vGMu/X37u7jquqiGfvmr5J8S8CweVXVNTIGfPrH7n5dVV0gozbLHXr0pbVvRu2EnyX5VXdffwOLuyGm2lEvy7iA+88kJ04XeufKaNL7F0kunlFz+RFTwL/UVP0uGSOoXq+nztfZPnVy/753y2iGfsA063MZAwa9oqduL6Yw+cRMg6lM02ZdU2gtLOzDS2QEDzdbmP2ejOP0m/rk/hoPytjXr+ruN+zyArNVU03wq2a0jHhTRnPAL0znhodk1GK+fJKvJvlY7yGjVS9XJzehPnNGDc0LZ7REeE6fsr/6vTJa39wlybkyRpZ/30aUeQ6mAOQjSZ7e3Y+pqiskeUnGze57J/lKxvfgm7r7DhtW0DVWVVfJOH5eLcmXktw9yRkymhPfYtmyp844H7hdRr/GP05y4d7KQI3s3qrqtN3984XnZ8w4p7xSktNn1IA8YqXWElNFm/27+xdr3RJlutZ9TkaQ+YmMPjffmuT4hfOoy2VUdLhhTh4o7FHOZzeOcJNZWDgRO3fGRdtFk3wn447OWZIcm+Tvu/vF0/KnyahJ+Ote6BSd1VnY/2fNCCLuntGh82eSvKFPbnq9b5I/mOb9ppd14L8xpd/1pgupU09ftudM8t9JPt7dd5nmXy9jQI+/7KnvITanGoNOHJHkPt39w6r61ySHJblRd391OrH6UEZ/qs9cjxOszWz6rF8so8nnO7r7mtP00/Q02FJVXSzJLTP6ajt7xgAfT8m4qL53kvd29232tOPEWphqzH47yVFJ/i3JrzMuMq+QcZH5qowmpV/d4kZIVX0iY7CC/8i4MXdwxmf24kn+vbv/dgOLx1YsnJ/cOeN79UsZIdIVMkbQfWZG2PSNDSzmplRVr8yoYXdCktNkBJxHZPouW1juPBk39Az2shVTcPe6JFfMGA38mhlh5iO7+51TLcVXZQyQ8pe90Afs3FXVaTNuDt0nYwDN72cMyHbT7v5S/f5Ar2fMCEPP0fra3GNNx+1bJ3noVNtxafr5klw5o0uRq2X8Xz0246bULqsoM32uD82oef2nGaOhPyGjj9ilkdn3yugW7zYZtTV/knGj6InOa3c94SazUlX/k9Fvy6O7+5U1+r24ekaNwSskeVJGPy6/3MBi7raq6tUZofHRGXdbD81ogvL2JP/W00iI07Kn6FtvTzZ9bi+Q0efSPhlNGs/X3Rfc0IKxQ6Yg8/E
Z/W9eYrqgvmDGCOmv6u7Hb3UDu7mqOnhqfnN4Roj5gu7++ML8a2RcSN8goxZVMmq5XLe7f7wn1e5eK1PNoCOT3L0X+nytqhtnDKJyvozBcF6dEXL+fMUN7YGWLjpqDGrxjCS37e7/nubtlxFw/nVGM8r/yOhz16inm1SNQfw+khHu/yjjO/cOGd3jHJsR2r1uT62tuWThc3/9jObTD83oN/qPMo7Pt8oIiB/cy/qRm9Z3nN6Kqjoko+uaP86oRX+/JN+Z9vllM84X/ru7H7Q7Bh9VdfaMmr5/ntH8/iVJ7rvUbHh5yMmercagU/dNcs3ufkdVnWXhs7JvxnH8hhmVas6c8V19ZHd/bReX88wZLZEemOScGTfNnpXkqwstGM84LXO3jBZMV9mVZWQQbjIbVXWRJB9M8ugkT+lTDkpx6YwT1z9O8sfd/b8bU8rdz0KtiOtlXCDfN6Op+UlV9b9JzppRg/a4jA6W/6ONJnsKU43jt2ZcLJ8qo2bQnbv77RtaMLZp+YVcVd07o9bhAzIupO+RcTJzwe7++u54sbItSzcyprvXnTHoys0ymvE8J8lR3f2VadlTZ9S83yujudGnuvv7Lni237Ka9NfLqM1wxSlYPvXizb2qun/GRcF5k1xNk9LfV1WPzQjBrjY1YV4c0ODsGTVir57kcm0AjE2lTu6u6IwZtedf193PWJi/X5LLZhyvb5hxzH6o796kqh6YUbPwjj0NdlFVZ8moUX/PjM/86zOaWO7xg+XtiDp50KX9l2pn1uiP8wEZtdTO3d0/212D4qlFxyWS/G1GbbafJXlYdz9zYf5evvNJkqr6w+7+8tTq8vUZ/SK/ZOG4dLqMVhR/NT2+mxEuHtm7sNu5he5N7pzx2f5JRi3OVyX53tL/8lTp4ZfTNYFz211MuMls1Oin8ENJ7t/dz5oupGvhIuSgJN/IGMDmARtY1N1SVf13xsAfD+zuY6rqNkmel3Fy/AcZd9N+khFy3qe737JRZd2Mpi/FP8nYV58UwM/TFM49OaN7htMk+VaSJ3f3k5zEnKyqDs3JAye8MWMgj3f1+nX6vsepqndnXEAel+SePfUBOX037r1Qm+CcSW7f3Y/bsMJuYgs3LC7S3Z+fpu2d5KSpttU1M5r9X7O737VhBeUUFmognivjputlkrysu5+9/CbT1Bz4mhl/53/p7qduRJk32sKNkUskuUiSW3X3TVZY7pCMZpYPSHKeJGdpfSKewsK+PGPGZ+tKGdcgL0nyw2VNsPfJ6AP24hnh+rP2hO5rppp318iovXrNjBuef9/d79jQgrEpLN0Qn47jlXEMf33Gze8PZ/S7+bo+uYujs+bkwf0uneTsG9FCcAphL5JxfLx5RjbxuIwuln6vX1B2LeEmszGFlx/J6Aj+Vt39nWn60oAAZ8wYzfijGbXihAxrZLo4flWSz3b3nadpX0zy/ozmkL+sqqdmNEP5XpIbL91xg81uR2tbTsei8yY5R5IvLAQie1ytzeQUtad+7/1X1Z0ymvKfOmOwoZcm+aDmvTuvqq6T0WLhkhnNcA9P8pqFJl37JKccCXl3rSm0M2r07XV0Rj+N91xeS63GYHn/keTPu/vNG1BEtmI6xvzH9PRTSW7d3Z/bwrJnXWqWvgcfr0+b0R/iqTJuzt2yuz8whQu1UPto34xjyxm7+61u3q2sql6e0VLh+Ix+e4/LqFX23CTHTMHNhZPcMckXu/vZG1bYDTLdXLhVkntltNx4a5KbtC7E9ngrtI7aO6Nbh7tPk45K8tLufufCMufPuIH7hfW8SbD4HTEFsadbDC+nY+nVM869LpvRzccTexrIkY0h3GQ2pgu1h2b0I3ZkRl8XX1ionXLFJK9M8sL+/+3ddbgc9dnG8e+ThAR3d3eKFniRFmiBIsXdvXiLQ3Frobi7uxR3K+5e3K24e0jI/f7x/JYMywkkITmz5+z9ua5cydmZ3fyy2TNn5plHpD1qW2g3VE561wTelnR7RPyZ7Bu
0FnBzCS7vB4xN9mn6zBfRZu0lInYkB4kd2SjLK48H2Q/5r2QA6RJyYEWn9kzqTppOujcCDiPL/C8mj823Vd5/ByUGoVEBQg4L2B14hqxIuErS65FTgHcFppV7JLekiBibzJpbjQygPA3sB9zYmSWLXUUJNK0DLE72j34W2LbRsqKatdz0vLYMBnekkjW8KHAjefy4DhiLzOTalBz0dhB5w+mTyMnL/Uu2Z1ueH0e2aNqFDJivXfd6rHVExJbAfxo3piKHee5Dtn16lwwcXiDpmU5aT+OmfR9gOXJWwjjk8LWTgSvK93VPYHxyYOYRZBuPAzpjjdYxBzetZVVOHsYBPq5cyB1BTth9HLgIeA4YgUwPnxaY3HcDh71yEUg52C9P9tdcRdIt5f/oH2TJzYI+AbauICKWJS9GLlY3mlramSqZ85sA+wNPSVq8bGu+Iz8lmdGyBDlQy8HNITSoi+LI3oKHkb3yPiQDdJc4g+DHfi5AExHrk/2zxiEvpt4DZiQnSK9XzRyx1lMycH9HZofNSVabHAQ85nOSn4qIWcnhbhuR585nkTen3y7bfVPkF5SAzOpk24/Xyo28Ecgsrp3IAU23AUeRwfa2H7JZriV6uXrDGiLnZjxCJi7t0ZQduSAZ5JwLeIqcmn7M8P5eqgQ3jyazrt8hzwvGIrOPHyNvCt1T9u8JTKGB/eV9M6gmDm5aS6r0spmcDJqNRjY9/7hsX5Hs4bJg5Wn3Aoeog+mONmxFxIzkhPQnyP5Cs5EB520kneqTYmt1JRP8G/Li95+Vnj5D9Nmt3IT5LfCepDeGz4pbT+Xf3ocsbzwf+JcGNoGfiCzfB3hO0qfl8eklvdCu2StDo3Ki3YtsaL8C8BXwAvCGpBfLftORA3D+SAY5l5N0fy2LbjFN2a6/I/uVfgK8qzJgplyg7Aj8lmyl8BxwnqRH61m1NWv6f+xFZoE1Ss17klO/VyYn1o4OnEQOnmibY/OgNF9wl9Lz+cis13WBXuRNqiMciOtY5fpkTmAOYG1Jf+xgv9HJ7Ni9yHPk8eWe02YdiojdyIz7y8mbid82bV8XOBS4RqU92nBcS+N8aw6yHd5+ZK/mryOHBS1M9v2cnoxNnDs812NDxsFNa2kRcT0wLnCUpHM7ODH7LTAT8AbwhKRPalpqWyknxDuQ0+JGB74nS282qnVhZoMpIrYmMytWkvRIySaYQKWX7xC+1nhkQ/F9JZ09jJfa8iJiVzKQsIak+0v2yopkFtxU5N3uvSSd+jMvYz+jcrK9N1nyOBYwAOhN9qTaFXhBAwfsrUAOblhwEC/ZdipBia3J5v+jAAJeIntunirpzrLvj6bOW2toCvKvQWbUjEdOYz4BuEzSV5EDH2Yny6+3ICc171/XuutUec8CmJQsoXwH+EplSFBkz/rfk+/XSmTW8kyNG1L2YyVT/k3ys/cFmf16pTro/VfKa6eRdIdv6JkNWkRsQJ43PkQOr/1vVHpqRg707CXpi874XoqIQ4BVgWUlPVV5vBdZJXAmeWP/j5K+Gp5rscHXo+4FmDVrlD9HxJ+ARcjplhc0Nlf3lfSQpLMl3e7AZueR1E/SQeRd6U3Ig/zf4IfMCbNW9xVZfjpL+fo44PFyITJYysUiwIFkkOnWYbrCrmNU8gbHq+XrzciM2E/IC+UHgOMiYvp6lte1laDcgIiYn+wL+W8yEDEOGYToA3xeAncjA0i6ohHYLCfiba+8P6MBB5ATWX9D9mq8n6wCOSkiDomIqRqBzcb5iLWcg4GjgTHJC+E+ZFn1EuWi92tJ95FZiMuQx6PqMbstlISARgDgWHLg5kPl9/0iYsmIGFnSJ5KuICuitgeOl/Rpu71fQ6AneUPvTPJaen/gzxExYvOOkt6SdEf5swOb1vYa14kR0adUmzRcAOxJZpMfEBETl7ZHvSL7uH9D3sjqrO+lT4AJyF7xlHX0kNRf0m3kDJD5yAxOaxE+abOWUzlg/Rl4C3i4XJRUpzhG+X2xmpb
Z1iKiR/n/eFbSxZL+S969xuXo1kU8QgaGdouIg8kMoCMqJdW/eFFXSrLnAjYG9iYHCLSjt8gMzY0iYk8yUPwosFq5YD6LgcFkG0KVY+ruwJ3A0cpemgsDk5B9TN8v+ywf2du0+vzhMkm0i5qKLDU/UtJT5Qbp+mSfxpfIwXnnRcR2EdHHwYjWUclAnJ78/zoCmL+UKL5GTkp/uuzTCPK/K+l6Sf3atAdaI1ngQLI35E1kz+MrycqbI4CdSok1kt6QdCR5ww6aEgosSfpG0uXkjZKtyZ9vlwJnR8RcDgqbDVrlnOYY4MmI+HdErA7MAJxCXv/PD1wSEZOVYOJ35bnD/RheuSH8OHnjbLdSzdG//HwZoWz/mvzen3B4r8kGn4Ob1sq+AMaW9Hz1wUqft0mBgyPCE/c6maQB5f8gKo+120WDdWGSniQDGZ+R5el9ASJizLJdg3mBcjhwFzm8pV2/B04mS0L3AHYDTgd2lvRKOUkcG+hHngjaUIiIMcgWIG9Ierk8fDTZn+o/JXgzAZmdtYozDgeqZIlMQl4wTQJ8Wh4bCUDSdWSfxgPIcv+DgYVqWK4NQiXQvA6ZSXNV+dzPQ7bBOJSB2eM7RcR5ETFK5fltdXwu58rfR8SEZADuOGArSbeQGUkfkC1D9gYOj4jNG1lUjffawf2ONc4NlMNDzgPWI9/HBYDbgf0jYopBvoBZmyvH5snJqqcFgVOBc8gKqMXJ8vQ5gEsjYtFOWlOj8nDtiFiOnCtxM9mGbbfIIWyUnzvjkK1PBpDVSdYiXKpktYuIMRp9f5r8FxgrIv4CnKkyzbhygjod2XfMn+OaNF8slIvrsYDn2+1CwrqWkgX0YERsQ56Y9CIDG7+JiHOBWxvHnObePpUbLKuRAZAlJX1Rwz+jJZQ72duTWUDjlXLQhtnJnncPSXqilgV2A5I+i4j+ZAsAImILcrDQRpWfnzOS2cNfOygxUCVL5ETyIupr8qLpRUnflAuaHiUz5MSIuI7ssdWubSZa3UfAKJXjyTHALUAjQ7MPMA3ZU7Vtzw8r52Brkf0hb5D0eURMDewMbCrpjIg4khwI+XvgNLKnr1XEwH6945DHkHkjB+ZdDNwh6dmIOJycjL4+OWxk84iYRdJ79a3crDUpeyPvS7Y0+pIMZk5GlnmvRA7h+oYc7rc68J9OWNP35Tr2DGB/SVdFxFrA2eTN+6Uj4m6ySuDPwB/IQUMfhwfptoy2/aFvrSEi5gPOjIi/A7c0BQjuJkuNdgM+iYgbgS/LwWcq8u79yOSdHhvGhrKE63LgRfLk+LthvyqzYaME5HqQAaI9yZOXv5EXJYuT5WUXSHq42g5DReRAgQPJ48+ddfwb6lApDR2ZPPmcmcwAelXS02Rpb2PfWcieyROT03iHeBq9/Si4fiVwaETsRA4QOpw8yW4MBFmaHBZyfnmsHctwf87xZMBrEfL7e1pyiNAHwPel1GyAcqr28fUt037B28CEEbEgGcScmzxmf1q2z0D2Ur53EDfO20b5GfctGRB+rjx8IPAUA4MF55GBzZvIrMOf3NBrZ40M2PLluWRw8zsy+3VD4NyI2KZ81u6NiJfIIOdUDmyaDdR8TiLpvojYGbiw/Fq5tHvYNSJ+A6xC9sY+qTy/M45LUwP3AdeXNX4ELBMRa5CtgbYARiAHCR0qab/yPB8vW4SDm1a3IE+8LgCujohDgcckfSfp9YhYhryTfB5wLXBPRHxDBjZnJUtsfED5lSp3pUcHpiX7VvUdzOc2sthWIO+wHdjojWLWykqQ7iqgp6SvgR0i4gSytHc74E8RcRpwuaRXmwJF+5IX1n+W1K/TF1+DSmBzFDKouzQ5bbon8HZEHAscVbKnegPLA/3Jqd2vluc7sDnkGp+784D/I8sfBwDPlczDUYG/kMPdjlAOAnEQuYmk64HrI2I94O/l1yIRcQbZVqIfOADfBdxMlgseS7YYOA94oJzDjEuWqM9KZv+0daCuHK/vBl6S9EnJNpy
N/Jn2WtltRPI4fZSktxvPq2XBrakHefNjH2BecorzCZH9tu8hg5zVgM37wEUxcDhq237+zKoa59AR8TvyJtV75ab4bBFxPnB6RPwLuELZOurJRnVnudYcLt9HlWvgqcibF9MDb5RtfST1lXQhcGFEzEt+z38h6d2yj7/HW0j4pr7VrZR5rATsQk4lO4FsKPxauUieDliDDGhOR17UPQmcIOmUelbdPUXEKWQm1lGSLh6C5/UiswLuAzZ2cNNaVfXOcSlHHZ0Mbn4YEb1Uhq9ExFLAUWSw/wmyhO/hyussSmYIndwuJzWV4OaZZPbbKWRJ6Bpkye/mwBmVING4wIiqDGlyJuGvEzlMZU8ycAyZPTACMB7Zg3Ddsp/fawb9PkT2MN0d2IAMTFxHfnbbJgu7K6pchM5Flg7OBlwDXAa8Th6DFiEzcndvx0B15T2anwxqflg5do8I3EteuK9I9rvblJz2/VtJLw36ldtXuYH0OFmddJCkjyLiIGBd4I+lLD3IftP/kvRsfas1a10RsRHZX/MF4BngQbJyakpgS2BM4B+S7q1hbY8Ac5JVALtJOqmyrbevbbsGBzetJZSTgqnIk6wtyRKafwEXN3pZkBdws5A9ON6W9GlNy+1WKifCGwGHAf8gU+07PDg0BYcaWZu7kmWSCysnp5u1pMrnfTFyCMAi5BTvO4AzyVLGtyv77wzsSJaYfdX5K24Nle/16YGHyezB4yX1Ldmt85JZrK+VC8ElyGCbJ3UPocpndAJyQMXoZGbV9ZI+LvssQ77H45BVOGeS2WuftGNAp1klmBPAGOSN0S8kPde03yzAPsAfyfdxeUm3dfZ6rWNN5xsjSvq2cZEZEROTg3K2JL9HIFtkHAEcXI5XbRnkL1mDb5ABhJVKNneQWYink304zyTbWCxAZkpt6gykjpWM1zvJpIrDS9LFU+R577ElEeP/yCziHZSltWbWpBy3JyfPvxutjUYibzB+QSYyvU9WZl7RyWubmez9uVR5aB/gHEmvlu09yATUtvuZ0pU4uGktJbLn1RxkMGFl4H4y2PYfSd/UuLRurZz0vkz2YNpV0geDe5IbOYnzRTIwup9PjK1VVYJG05DZK33J3r6QJ1kzAv8GdmxkG5bn9ZLUv5rZ2a4iJ0geC2wg6baI+D153FgDuLQElP5KBh2WkfRCjcvtcpqyh28AFiODbp+RPajPkXT4IJ7bloGcjlSC8VuQN02nIW+Q3gXsKenBpv1XBDaUtFznr9aaNQJx5Xg9NdkP+c/A88BVwJ2Snir7jkIG+t8E3pH0v/J42wbqyrn0NuQQjCM1sC9cY/s/gM2Az8ky/78pW1y07Xv2cyJiTHLI6dWStoyIK4FJyZshb5XP62bkjIC1Jd1T32rNuoZy02AyMltyI7JFxsjkz+sXJM1Y07oWBY4jrwluIW8IXa827+HcVTi4aS2h+YSqlIz9nuyJNTfZaPhwshek08KHsYiYm7xr9i9JhzVta2TAzAVMX/qOVC8ezySn2y0m6Z3OXrvZkIqIq8kWGH9VNjQfGZiIvKGyK9kLaFmyzLFnuwc0q8qx4h6yFO/uiHicDCqsV7IGe5Pv4crkxOk361tt1xARIwHzSbq98tgWZAbB/uTPv5WAJcm+xi+SvY2vLvs6m6CichNjQeBWMuPqSrIE9zCyDP0sYF9Jr9e3UmsWESM138iO7Bn5G+ABoA+wEBloOom84Hy1sq8D/EUJcB5BZrZuK+nYpu0jA6PLfeN+oqPPUeTg0y3IY8cu5PCTq8q26clgSB9Jv+vs9Zp1JYM61kQOGZ6WPM+5SdJ1w7MSpXJ9OwGZQTobmZH9tKT3IuIvZOyhF9ln/kLgNh8nW1uPuhdg7avc6QQGNi+P7N1IuTtyLdkTaAeydPQG4KDIoTc2bH1Clu71ho7/b8hgz2rlhwAlsDkLWVpwqAOb1hVExOTkhfJdZK8fJH0t6WVyQvKe5EnOykoObBbluPAW8Arw94jYkuw7egD
wZdltZjKw+aikN6vHEhukPYHbIuKkUu4IMAWZRXyKpNclHUH2EzyEPNE+KyIuiIjZJA1wQGegyoXQQWRW8baSjgNeAvoBl5DnFg9ExNY+p2gNJUh/dET8uWTKERELkceUDcm2F78DFga+JzPIj42IVSJibBg4sKLdlYBAP2Bbsp/d7hGxSmNbyRD/uhHYBA8Rqqq0Qlg8ItYp1yZnkz///g68V7ZPWM4p9gXmJ69XGv28zawDzceaxveLpAcknSfpb5KuK48Nr8BmzxLYnIzspXsTeTPoFuChiPgncDHZz/xcMrP0BrIFk7UwZ25abSqZfwsBq5P9NB8h787f3Ej/LplAUwHbA4tImqGuNXd1pQTg3Q7uSI8GXE/2X1qqBHqan3cMMLakxSqPz0Ke0F2gnDZt1tJKltyLwI2SNi6P/ejOcEQ8AbxDZh46uNkkIpYgh3lMANwtaZHyeGPYzYrAdJLecTbQL4uIpcnA5f+RPaeOJW82zSRptYjoA3xXueCel2wDsAbwFTBzCWRYEdn/7iyyd/fp5SLmKeBp8lxiVTIjA/J7fRa5j3etImIR4DYycHQecBE5XOKfwFqSXmhq27AR+f/bizx/2UfS852/8voNItOw0U5lNjLraGzgT5KeqWWRLaxyPTIBMCHwZPn6K+BIcsDJV+VYvDN5DBkJ+Jr8jL5K9t48wj/zzIZOHZn3EXEdeQPtOHIo7lhkZvYCZJXSOpJeL+ddf5O0Vmeuz4acg5tWi8pJ1/zklMve5CTCOcmLuzuA8yXdWHnOKGTJx8c1LLlbiIinycb782tgX6rGSd0q5N2pR8iTt/sqGbW7APsBq0q6qhoMCvchtC6kZAddTPZoW6Nxd7hSyjoKWXoyJrCkg/Y/VcoZNwA2IW9KfQA8B8xT/ny4pBOGZzlRd1NKSNcHNib7PH1OTkL/Q6NMNyrTOkumw0rAp5Ju9nv9YxGxOHAisH5pn7A62TdrSUl3l33uJn/ePSnptPpWaw0RMRV5g2QD4Ekyi2ZxYK7KOUf1+yDIbJutgQkkfVTHultFZK+4vsCD1fOyiJiEzE7qAWwu6WEfM34qsl/0ZmQweGZy0NifJD1ZuW7pA0wMrAmMS7avuYTs9yoHN62dxTBukzM8rzEjYgqyuuNQScc3bVsXOAF4DFhR0oeVbT52tjAHN61Wkf3aPiR7X91VLkAuIAOcX1Du3kt6tL5Vdg8laLMx8DtJjfKkifXjydDrkw3opyF7cH5CnuBNB1wnaY1OX7jZMBYRC5DHme+Bo4B/qwwQKheHZwKXSNqxjjvJraJy42M2cnjCOMD9kl4q2+cAViBL0ycis+LOlPRQ9fl1rL0rqV4Mlyz5LcnS/hnJyfTbqQyoKEHNHs7U/HnlfVpX0pnl6/PICa2rlYziiYBLyXOME/w5bS0RsTCZsblAeeh08jzxzbK9B9CrEuQcXdLn7XzRGRFrk0G5D4Ce5NC8/wFPkBVRqwNrA1dJ2qqudbayyGGDR5PTkvuT7bF2HZyMYP+8s3ZX/R74NUHJyrnnDzeyhofSzuRE4EJJl5WbZT/02Y9svXQsWTlw4fBahw1bDm5ap6sctJYlDypbSbqybHuKvJg7pfyaAniI7HNxqDMEf52SHTRSuQhYnywt/TtZTvNlaQEwH3li9ydgSnI66elkAOjjdr54sO4jIpYi78pOTmZvPUZezPwZ+JZS6tuuWRiVTNZ5yPLQqYBvgO/IDKDtNLB1yE+GgNiQKSfVUQlyzkUOr1iB/Fz+Gzi4EtwZrif9XU0MHAwwPjCDpLuath8DrCBpsvL1YmSlwt8bAVBrLSVAvTqwOzATGYg+k7zB8lXZpxeZJdT25yTlsz872VN6LLJX/ZjksXsAeS43V9n9DGBnSR85KPdTEfESMHX58hqy3+atkj6p7DM6+bl80RVlZhARV5FZ4wdUHhuqa8bSpuRKYB5JLw6zRQ58/c3IGMTX5E2h7YFvq9nXETEtGYM4SdKuw3oNNnw
4uGm1KaXOG5GDO56KiE3ILKrFJD0QEX8gL6K/B86QtH2Ny+3SSobVm42SrXIhPS958bwy2XNsT0kXle0jSvo2IiYEvmhcSJh1dc0XchGxJ9l/bzzyQvAsMvvwfgfyISIeIrOADiIvkH8PrAWMDOwn6Z+VfR1wG8YiYmWy/H9+4H0yIH+83+eORcQpwKJkOf/rlYuUFcgA8R1khvGyZB9T9/BuIZWb39UMoLGAnYC/AZ8Bp5H/l0/6+PzD+zUl8HlzkK2UXfYlM2BnJPtE/pmsztlR0kmdvOSW1sjcAv4KvEbe4N8dGIEMrl9A9pn+PiKWAQ4D1pT0WC0LNmsR5ebKOcAfyKGT26i0lis3qjQkiQKlbUx/svf9l7+0/1Csd2Hy58o8wCjA9mpqTxMRC5JVjAdL+sewXoMNHw5uWm3KQWNpSbuXrx8F/ktONv0scljNCcC6wP+ctTl0ImJWsnfVCcD5wAOVlPtxySEWWwJLks38d2uUlZp1V/HjwRRjAKMBXzsDY6BywXw9GcS8oDw2OrAg2R9yVXKQwt8kXVPXOruySobsOGRW1fRkhuyrkv5T9mm0FFkHaBzPF3W27E9FxFpkpcFNZNChkeE3AtlLb20yu+1a4DBJD9S1Vhu0iBirmiVXHpsR2J+8IfsoGWg6T5WJ3+2kcuyYjvzMj0MO3Xz/l7IxI+J4YFOyl5yP3R2oBI4nIKehb0YGPM8kS//XJSuh5qxtkWYtJCImJfvUbk4m0NwCbCbptbL9Z0vVK99zGwAnkec59w7H9Y4KrFJZ783AocBHZMLDX4HZgGkl9XWWe9fg4KbVptwhHUnS1xExMXA18KikTcv2pcmD2zqS7qhxqV1aKds6lbyoexM4mey59ExlnynJASvbkv3zTgb2b9eLBmsPzaXA9uPMKbItyGHAicqhNdWA8ETkMWND4HfADpKOqG3hXVAlq3A04EYyO3MAOfTjY7Jn3gGSHiz7TwHsCrwi6RCfaHcscjjeyWTGxQ6S3iuP9yAH6o0AfOngcOuIgcNa5iEHtSxAllbfQt50vafy/7gE2Y9zTmAqSa/XtOyWEBG3k8eMQyRd3VErlUogdITSbmVS4C7gaknb1rDsllI5Fo8A9ALGVhm6WdlnLuBfwGJkRdnb5ICy51zlYTZQRMwELAPsCIwP/APYW4MxiDZyYOVLwBVkstNwT2wq57ObklUyk5AzP94n23fcJ+n2XwrMWutwcNNaRkRcQ2at7EieXGxGnri6bGwYKL1Djifvqt1H9jS9WQOnpvcmy5ZWJbOEJgDWk3RePSs2s7qUcv19yX5Ee0s6rDz+o9LzyknsSZK+cMBt8FUCyWeSE6GPIafuzgCsR15Efw7sRQ7W+76j53fuqltfyXTdE9gB2EfSgTUvyX5G5fugJ/ACGdR8jjz2zEZm1Z8PHCHp6fKcPsB8ku7sKJjX3VXesz+RJfobARdXS/o7aMFSHVw2Gpnd3FfSInX8G1pFU2DzaPImUw/ys3igpMeb9l8ImIxMxni+HT9/Zh1pnB9GDqHckjyvafSu/ZQcznVy2TfI4YiNgGfjuHUQsAGwkMrwyk5ae5A/b7Yu654Q2FTSuWX7MJ0Cb8NPj7oXYFaxBZm1cj5wMRlo27LWFXUDEdGj3FV+SdISZK+xCci+VYdHxDKRk0a/k/QkcAiwBnnX7M3aFm5Wg3KCQ0RMGREj1r2eGj1PDlL4Ctgvsvk65cQ1yoUgkp4ly3sd2BxC5UR+LLL/3fHk0LyXJV0naQ2yx+CowC5kP9ifPL8Tl9tyGt+rzSR9pWz+fwiwf0Ts1ebfyy2t8jneC+hN3lRdAFiezM48gWzJcFRpIYKkvpLubLxEJy+5dpX3bH6yD+kzzQHNxu8RsU5EjNoUgJuSDBof1onLbnVHkEGVj4DXycFMj0bE2Y3PHYCkuyVdoDJB3YFNsx9uEnxXuXEyMXmDcSZgG3Jw54kR8XBEzKv0fePneDl+TUOWgh8CvNyZ6y/reZI
Mbm4N3AmcHRGPRsRikga0+zlXV+HMTWsp5SC3EHmn53FJT9S8pG6juWwmInYE9iP7u50OXAo8XLmLNqakT+tYq9nQas6iGJqAW0TMTgb2VpT08LBeY1dR2oWsTGYRzg08SJb53lO2jwAMcDne0IvsLX0IOYn3sJK9FhpY/r8wOQRnP0n71LfS1lPJuFoIeJb8LFanGY9JVigsAmwo9xZsOdXzksihkksC6yoHGlYzDdcjex0eI+mvtS24xUTEtsCRwBiSvqg83siCmhC4lUwa+Gfl/Qxgfkn31bDsllPKUh8HjlQZkhcR8wMrkpVMo5JZ4AfVtkizLqBkXm4ELKPK/IYSuNyYbK0D2TZmE1Xan0XEJWQw9PcqA3DrEtlffg0yMDsL2Y9zBbmdTctz5qa1lHLn5C5JZzmwOWxVU//L14eSGZxXknfXzgK2iYgZyvZP61mp2a/SyFbZPyLmH8o7rQcBH5J9f9pCCaoREVNExOylvOhtSceQfTX/SR4v7oqIcyNiIkn9HNgcepHTdv9LZqf9PiLGlvS9svfgCGW354E3gGlKWZQVJbA5BdmT8U3g+oi4NiJ2iojFyYEA65HT0c+JiGVrXK51oHJe8g9yyu74wPfVz3oJ1J1NZtL8rrQdsNQYiHVQRIzXeLDyc286YETgo+pNv3Ku7cBmIekd8ibSY5XH7icTANYmS//3johPI2LRelZp1trK9eVIZE/aF8tjfQBKVcrfyfY7L5Et0pauPHdsslJoh7oDmwCSPleW0C9NVtZ86MBm1+ATZbNurFq2FxG9Ixs1j9t4TNIXkjYC5iCDOYcDl5TMNbMup1Lquy5ZxjgzDAzeDUqlHH05clDO39spwF8JUl4BnAOsHjloDElPSdqdbLZ+Jpld9b/IydQ29F4iG9b3JNuF7BgR05WMtX5ln0bA4kuXP3aoLzkI72/kEKZxgd2AG4CHyf7S3wNjkH28rcVExFTkMIelyVLgFUsJ4IByk6URqHuO/H4YbxAv1VbKz6ynyWGcWwC7RMScjeBvyQrfihzceWLlOW2vtFZp/MxfKiIeIKclj14eG7Ech7+SdCOwPQOnpfva2awD5Vj9Enkj/P/KY33Lt1ufstsLZMXg4pJOrzz3Y/I4dnPnrvrnSXqTPMfYsO612OBxWbpZN1Yp21uOLAf4DTnh8QGyv9vbTfuvSfZ3m9O9RayrKlk/6wLHApdLWm8Invcs2RtoQ0l9h98qW0cMnKTbhwwyHA18SU7wPh14sHEnPSJGBVYgy47+IunFelbdPZT3c14yOLcscA8ZXH6O7EG9c9k+t6T/NbddsB+LiEmAfmQvwtmAqciy9LHIoNmdg3621SVyAvrKZBnwuGSrhj0aQf7IKerHAp9JWrK2hbaoiDgZWB94iwzqf0XepBsJ2ELSxeFpv0TEaNXy/fLY3mTwYkTyZshKkj4v25oH6E3cfN5sZgNFxPhkFnQv4O/ATZI+K9tGIG+Srw8sK+nDSgsN92y3YcLBTbNuqhKwmIss23sfuIVs8rwEmal5HHBIRxfLPhG2ri4i1gFOBc4GdpH0SUcnUJWTq52B3YGFlY3Fu73KDZARyUFu4wCfAEGWS49KZheeDzwt6cvyvLHK++lg2zAQEeOQwc2dgJnLwy+Tx+wLlFOhfUxuUkr5Px6M/WaV9FRnrMmGTqks+TM51GVJ8hzlYjLoNAV5bFpL0nPR1EO83UTEnOQxekRJ95bH/kQOwpifnDT/OHC8pBvqWmcrKcfY88ifZ5dVAucjkp+7NckhVh+RlRunlu0BjFANcprZT1XOpVcl+10PIL/nbiPbiqxM9tz8r6Tl61updWcObpp1cxFxF/AdsLOkRyJiKXJYyuPkRfR/gQMkXVXfKs2GnYjoRZaijgocQGYt/61xsTKI50xAZssdQw4OaIuAXSW4eSKwOLCTpMtKRuHYZDP1rclSo1PJKZjPt8v705nKRfSUZBP7TckbUbuTx+sX/Z7/6KbdvGQwYm5
ywvYZwFnKQTRBnt8OcEC4a6jedIocZLYqmd3zGzKI9y9Ju5XtfYDv2inLp/E5jojfAzsCy5A/4z4FniB/Zt1d9p0Q+ALoq4GDydo+K6oEhB8BLpG0eqnUGKWRyVnOARoD9OYlM2B3bGR7+z00+7Gm43YvYNRGO6eImAw4mDyfGcDAdg5PAktJeqfdb1LZ8OHgplk3VLl7tjA5KGgP4MJysfco8Gp5bBfyRO4z8gR5aUlf17Vus6HxS9mDEXEOeYK1JXBqRxcoEXEqecE4d7uVnUXEuOSF3G1kqXm/yrYeZCbVqeTF9N3kzZBba1hqWyiZRLMAfyUDeM8BhwE3S/pfnWurUyUQPybwKDAKeXOuL7AU+Rn+p6TL61ulDa3m4FFk7+91gD+RN2LPIjPwPyjb2yJrvCmA8CoZuLwUeJ18bxYke9wdCfxD0mcOxHWsZAf3KVUHh5JBl7PIm0fflX1mJc8X1gImJac6b6EcOmRm/Ojn8cjAKsDm5I3GvsAJks4t+00PrES2OnobuEfSew5s2vDi4KZZNxYRa5Ol50tLujciViZLBJYoZY4jAM+Qwc6HlENDzLqciJiI7E/4Avl5foDMyngrIsYgP/cTAptLerjpgnFEspH5S5KurudfUI9GyR2Z0fKOpCXKcWFA9cQzIq4iT1onA2YENpb07zrW3C4iB4P8nrwJ9X9kQG+ldgu+N1Ru2p0BLARsLenGiPgtWfL2ITAJOdl4/3ZpLdHdVIOW5fi0BBnkXJ6sQjlC0oE1LrFTVT73u5O9IVeTdEdl+xIMPEZsIemsmpbaZZQsszPISeiPkxUbt0l6vbLPouTNpU2AdSSdX8NSzVpSJbh5Cplp/ww5If235Dni8sB1DmBaZ3Nw06wbK6WlW0k6uHx9DXlnbR1J70fEpMDlZOnjraXcry2yIax7iYiNyUD+18DI5ECF58ksl8uB6cmhLQ8A60p6qen5fYB+7fbZr1w4n0f2HVtO0u1lWx+VoUoRcT4DS9MvASYC/ijphXpW3j4iYjwyi2hhSavUvZ46RcQ0wK3A8WR2yBcRcQkwHTkZej2ypP8bMsi5maRv61qvDb2mIOfo5CCztcn2GRtIOrvG5XWqSjBuNmAhSV82HZ/HJqemTw7MoTIAzn5eRMwHHETeRLoWOIEcoPdh2T4qsICkm+pbpVlrqQQ25yGHcO0MHFnOJW8FepJDOV8tx6YvqhVBZsNTj1/excy6KklfVgKbo5GBn9EkvV92mYYsZxq7cXet3YI71nWVkmkiYgZyiu6IZJneisD+5OTYHsA/y+OfA/MBR5VS7B9eQ1LfdvzsV0oXdyPvut8QEXtHxIiVC+ffkMGj8SW9ARxKluvNUMeau4KImLBkwDa+jqF4jcZzviTbAaw/jJbX8hr/9ojo2fgeLaYnb1z8twQ2f0P2yfunpHvIGxiPAvcDYzqw2RoioteQfg9UAps9JH1egpnrkTdn2yawCVB6Z35EHodHKY/1jdRbOVTrKrJP8mj1rbRrkfSApEWBdclWIBcD+0TEfBExSjmHvgmG7hhu1h1VzpXXJjM2byyBzT8Ci5ItMt4s+2wLHFISCMyGOwc3zbqRygXh1BExTzlBmxigNE1/ApgrIo6JiK3JZs9IurC2RZsNpsrne9SImKRygnU1MG9EjCzpQUnXSzpS0mpkH82JgB2A1cnP/JLAvuBgfsWbwJ5kUGhn4LmIOCQiDiMnpc8MHFL2/ZIMME1Yx0JbXURMB9wFbBwRk8OPgshD4ySy/2Y7fVYjIqaX9H3JEOlVHn+W/Dl2d/l6W+AxMnsE8j3qBVxGfr9bjSJi+YgYU1L/cvHb65ef9WOVIOdYwGZk5ng7ugUYCdg3cmgQSt+Vn43flF+j17jGLknSecAc5I27Dcg2NjuXdjeNfVzqaPZjnwEjSXqmfH0kWSn1H+UAtNHIoX/jkNmcZsOdg5tm3URpzqzSf+wq4EFyQMgVEbFbKa85EriIbP58FPnDZqPy/CG
+6DDrTJWLi9WBEyJi1YjYiyzFuxb4pprxVfbtJ+nbEvC8UTlxdxtgo4hYrbP/DXWrZp9ERO+IGDkixi8XydeR2YEHk31L1yWDau+RvUpfLuWhs5LtLf7T+f+CLkHAJ8DRwMkR8edSmjVkL5LH83nJfoN3SvpmGK+zle1EBtiPj4iRSuYakl6TtFbJ2uxF9mDsUTKKIasRBgA92+z9ajnlnOM04KOI2BF+yECsHp8H53Uax6x9y6+Jh/FSuwRJ15DtGDYlf/4tXAm+LUxmUT3tXrNDp2QH7wPMTmZ/70H2mTazjr0KTBgRs0bENsBU5PnjF2X7DOWx1+VhtdZJ3HPTrJuJiIeAUYFjgTHJkr1pyEEr+0q6JiJmLNveqTZQN2tllT4/CwPnksND+gPXA9tIeqvs1+Gk2IjoVe4mT0EG/u8ENmmnhueV93A5YGPgN+QEy4eAwyS9WfablJyOPqqkFyvP35DM8LxR0had/g/oQiJiFeAAYArgHLJn3hNDcpIfEfeQWbIrSfpyuCy0xZS+mmeSF07LkIH0vSQdUbYHGdD8PiJ2IdtObEtOTt8aWBqYWtJ7NSzfilKGuBR5DrIWOd37b5KuKtt7kjH8X8xIjoiZyMEv25G9Vtvy4qW0utiGfB/GITOZe5DneJ8AS0p6LjyJ+FeLiCkkve730ixVzqHHlvRxREwC3AO8TwYyTwP2kPR1yS7fjuyFPYWkj8IzHawTOLhp1o2UoOWFZBDz8vLYRGQp11pkMOgG4F+SHqxtoWZDqKOAZUQ8R/bg+5w8qboUeKiRHVT2GYH83L9VyRqalCzxu1PSZp30T6hd4yItIuYig7vvk+/DxORE4g/JEuiDOrqYi4gFybvy3wFLu5/hL4uIkcns113InsfHkYNuXmy8x82f7cbXEbEOcDqwuCrTkbu7iLgZGIPMUJuIrC5YmgzCbyXp5sq+YwBHAGsAI5LtFQ6RdGxnr9s6Vs5BliAzC/9IZtlvK+nVsr1X9Zg9iNe4jgzm/UnSJ8N5ybWq3IDqAcxJZmU+AzxJHrODHCy0FLAS2SbkfuDfkh52AMHMhqVqgL+02jmAvM58uZxPnkBOSb+UbBvyBbA5eez6l6SDfZPAOouDm2bdTETcB+wp6ZbIRvPflcfnJO+gLUkGe9aWdEGNSzUbbBFxKhkQOrgELEVmd71MDllYgxyIcypwtaTnyvMWAPYmM78eKI/NQWY2r6AyFbWdRMRdZIByZ0mPRMRSwDVkZtTMZAbcgZKubHreSMD/Aa9Ieq1TF93FlQuCfciy/yfItiC3SPrfIPYfkfw8Xw9s+UvBn+6iZOjdA+wt6Zjy2A5kQGdF8vN3A/mevFa2T0EG6CcAnpX0fA1LtyZNGbYjAWsCW5A92ACOAXaqnKP8KMhZCfIvC1wJLCfp2s79V3S+SnbUgeSArJHKpqeBU4ArKm0YiIjRJX3e+SvtWoY2uBIRowBft2u2sLW38vn/A/CepAfKzcfJyBvcr5R95iSP7+sB45envkFOUD+y7NNhRZXZsObgplkXV7nLPx45JfNw4DZJR5ftfVSmHpevlycvsNdrlzJH69pKYOgG8iLvN8rhWI1tjQvBhYB/AfMDd5CZnO+QF9MLSZqw6TVHbafPfyVQsDBwFtlP7MJy7HiULAHeg8wwXI9sFP8EeQLrXknDSMl+/SewENkb+QTgAUmflu2N/6f9yIz730l6oa71drbIAXj3kgMJNoyIxchqhLXJ3q9/JjM6JycDxDs4S631RcS5ZK/e58js2tmB3wHfkjdZTi77BXltMqDy9dNk1uL61XOZ7iQi5iNv3n1cvp6WfK9OIgO7ArYElie/P44C7pH0dj0rbg+lr+/RwJmudrJ2FBEzk4kEk5LDOzchrx/Pa9pvFLIl2pzAx2S11Ntlm7PJrdN4oJBZF1buRA8oZbYXAg8DywFHRMSWAJL6RupTvr4SWFXSlzEETf3N6lKyVFYFVlMOElkiIi6PiKk1cNDI3ZI
WADYEpgXOJi8KlyQH4/wwNKsEkNomsAk/GsY0OTA28Fo5dqxMZmseJelZMnD0MtmD8x4HNodMCcY0P9an8WdJ9wCLkDeYZgcuAI6KiMnKdpU+VnuQmW0vNr9eN/c+cCuwfkScQ15UPQE8qhyUchgZfD+R/Kx+GBGb1LRW+xmlrJqIWJ0cYniQpDUk7cTALJ+ngBMj4v6I+L1S9SJ4R2BK4J/dOLC5BHAfcGBE/Lb8nJqXPAYfK+kmSTdLWpEsRR8ZOB/4Z0QsXlpfWJPG+W1EbBoRl5QegIP73MZxfBfgL2TrG7N29BxZ9Xcv2af9I2CMiBi3upOkryS9J+kGSQ9Wb7w4sGmdyZmbZt1A6Uc1NxnQ6QUsRvZkeoQs+7q97DcCef3cFiWO1j1FxD5kud6n5JCWfzZKG8v2nuTJ2PhkX82bOn+VrSlygvFWkg4uX19DDmxZR9L75UbJ5cDuwK2lpNR33QdTJZN+PmB1YC4yQPkAmSn7ZWXfMYDdyEDnlI3gTURMCawGnN6ObRMAImI9Mrg7Mtmj9B/Ac5US5rGBRcn3blnyPZ5DnpDeciLiDLKscRFJr1TLzyOiEeCfsey+o6TDy7aRgZvI/sD7dNdjUKlM2I68CfcdcAiZpby9pHnKPtWedyOQWd17kj/jpm2Uh9pPRcQLwN1ku6YO24AM4nkTAs+T1VD7d9fPn9ngiOwBfjY5GK5xnngqWXnyWdmnN9k6ZmRy6KS/Z6zTObhp1sWVE+ObyEnHp5THZiYvrNcBpiIbPO+oMgnZrCsr/QiXJktVFwbeBQ5uLpNpeo77/TSJiNHI8v3JJP1feez35GTvnSVdWOf6upoYOLBpBjL7cGQy63BickDO48DJki5ret7okj5vCvr84pCV7qhSlt+H/L5+B5gaeI3sk3sN8GYl0DMlmRXYR9KBtSzaflZE7EX2PR5XZRhQuQE1oPxfb0IO3boQOE7Sp5XPwYLA0422Dd1VCQrMAWxPVim8S9502gC4QR0MH4sc1LSMpFPrWHMrq3x+xiYrOE6SdO4QPvdMMlCziKR3huNyzVpSB8ebWYGXyPYYu5LHqNPJTPInyZ/Vt5I3cneqZdHW9hzcNOviImJM8qLvAknXNv0w+h2ZDbAcMB7Zs+qc2hZr9itVswhLZsWqZJbbLMCDZIbP/WV72wc0KxdqU5Pl6D3J4FCjF9LuwF7AyWSWyjrAxJImr2vNXV1E3EL2ntpd0q0RsQzZq+p/QJ/y5xMkPVzjMltaZA/pXcisvlHJC6klyX66jX6DH1T2b/vv9bpFxOLksLGXmx6fh8ycuwbYVdJL5fHGsWlT8hi+vqS32/n/spSkL0f2iv4D8CjZouJeVYYGNb9Hzq4fqPK5moQMDv8JOFHSeYP7PpXM+/uADSSdPXxXbNaaKpUoCwCPNSojys2packKqg3Jm4/3kMHNOckb5l+087Hc6uPgplkXFhGbkb3HvgCOAPYlp8r2qGQAjUSeLG8EbOHyJevKooOJpxExG9nDbXlgHHLC9K6S3qthiS2jkkn4W7J8f2bgG3JAx+Vk2a/IoTaLkyWOjwK7SbqlXbMHh0blgnoB4GIyGHehctjVXcCXwMHkMKH5yF6Dt5MZ9d8N4mXbWtONunGBZYCdgWnILL+TyKw+98OrWTnGPABcRra4+LaybWSytHczMsB5Ohmsez8iZiSzOueQNFPnr7x+EdFb0nflXG1GSY9FDtZagfy8j02+Z2cCz6qb9h4d1iJiF/J4C3Az+bn8YHACLuWY/S2wotqsP7dZVblJ8Dh5bni8cm5DY1sf4LfATuSwv4eBwyVd6PNHq4uDm2ZdWOTk453I5vM9gG0kXVS29QSolDM1Sh99h9+6jEqAbjKyv94SZFnMhWRg43+Vff9ItmNYD1izufy3XUXEQ2T227HAmMDKZIDoBWBfSdeUIMOYwDuSXq9pqV1eRPyF7IW3sqQHStbmFcCSkm6LiGmAu8gBFXdI+kt9q+1aIiLIgVg
bAJuTQZ8dJR1T57osRcTfgW8lHR457XsSMojZr2zfifze6EmWML5PlmJPAqwu6d8d3bzqrso52oiSvipf/5s8Li9egnA9genIz/om5Pt1FJn5/ZrP435eaVnxJ2Al4I/AncAOkh4p2zsMckbExsApZDn6nZ23YrPWU1o77E0OQhwd+A/ZPuSRpv0mJluNvNvpizSrcHDTrIuLHBCyMjlAZR4ya21nSU+X7b2A710aYF1ZRNxG9r96ERiJnKB7PZm5/KDK4JXSR3JBSTfUtNSWUoKWF5JBzMvLYxORWVRrkYGFG4B/SXqwtoV2E5H9jjeT9Lfy9c1AP7K88f2ImILMmt2GnAD+jW84DZnIgSqzkkOvTpYHhtWqoyBRRDxOHqP/AVwv6b/l8cnICeiLklUmrwLnSrq4M9fcCiL7G29PZhXeT7ZV2Zx8P76u7DcieW63Ezkt/WXgT74JNXgiYm4y63sj8ubICcBekj7qYN8gA8h9gK0bgXmzdley87cig5xfkoP+jm/3CilrPQ5umnUTJWCxKRm0mJAsOd1X3bwRv3VflVLfxckTqS3IgCbkxcq+ZCn1meQQnKcaWTDV53fuqltPRNxHToq9pVECWR6fkzxZXZIMcq4t6YIal9rlRMQIkvqV0ttpJT0ZESOVoOVYwEXAV5JWLPsvBJxFZhBdUd/Ku76IGLFa/mytI3Ko1pHkseU+cqruTY1M+4gYgyz7/b7SQqetgvzlWPAvskddP+AVMlvw0+bKm7L/6GQW4hKS1qphyV1KU1uLkcmbo6uTN/W+Bg6UdFQHzxsH6OtydLOBbTMqX68IbAksRBmSCJzjEnRrFQ5umnUj5a7zrMB2ZDbnyGR57qW1LsxsCDVdmKxGNi5fW9KrlSbno5OZLzuS02XPIwcHtPVk08r7Mx4wGtnv7jZJR5ftfap92yJiebLkfz1f0A2eEnzo08iwiojzyYno60r6uLLfCeSxeDfyM7oZ2XNzIgferTuqlpaXtgxHA1MBlwDnAnf5pmsqQd4zyP6afcmbd3tJeqpsH6FS1j+qpC8rx/e2KeH/JZX3pAfZA3BDssLjO+A0DRwyOAGZebY2sCzZm/tf9azarPVUWkGNL+n98lgAvSrHoj7kcX1j4A3gGWAV32y0VuDgplk3VMr2/kBOQd7VfYOsq6hepJTflyczW5aQtEBlv2rwcxpycMDKwHjV4FK7qZyYTkpmCM5J9tIU2ZP3+LJfAL0bQc7K83zBPBgiYhFysvyNwJvAvWTg8lxJ31ayjhcie51OSvaIfIPsE3mpG+5bd/JzmfKl3+a+ZLbmqWRrhgfaKVOzWeUYcRp5I1rAYmRP0rPIzMJPyr7TAqeRgTpP725SOV/YB9gWGBH4hOw1PRrZ93iLRgltOWf4A3CmcpiTqzzMitLu7HqyZdE5kt4oj/ckB9b2K704nyB74F8j6TB/H1krcHDTrBurlEf6B451OeXu8KNAY4ru3sAx1ayfpiDnFJJed4AOIuI6YG7gbKAXedE8G/AIsJOk28t+IwBykG3IlIy048gLZ8iS0iUlfVyyh2gEbiJiTGBF4HtyCNYjP31Fs66pEljqBUxGHnf6kMeaLyql6KOS/Qw3BD4DFpX0eD2rbh3lRtNIkr4uVQobAguSmd6HkIG5bYG/A5OrMkTPfvT5m5EcVHUc+Tl7k+ztuizZb/MjYMPGz77K831+bG2rBClpqjj5DXkDahTgMeB84ApJX1T2mYIcvLWNpOfLY/5esto5uGlmZrUrTf83Bv4p6c3yWE9yeuzywNZkBuJRZPn5y5VebT6hqoiIyYGbgMMknVIem5nsN7YOA0tEd2y81zbkSm+2K4H5gY/JwU3HSHqxbB+B7Cn4kxJSf2atu4mIPcjA3FTloS/J7J9zyFL0z8p+c5A9Z9etY511a7oh95MbceW4sh7ZG3JOslxdwCGS9vXNu45FxNFkL+7lVAZqlsdHA9Yke8BeLmltH3/NUkTcA/QAdiWz6b+
tbPsbeWOlD3lOeaGkG8u2Fcgs/PUlXdvJyzYbJAc3zcysdhFxBPBXYFNJpzVdAI4CzEieZK0JPEteqNwo6e2altyySqbgscAFkq5tei9/B6wLLAeMR56YnlPbYrugkmkVJWh5Azn1eSRgduB1Mlv2lEowZzpyavQFki6radlmw1ylncXSZKbPFeSAtxHIDM6tgP7Azh2VU7djoK5Sjv5n4I/AFMBVwN3Am5K+KfvNSGYdjl8eP6b6/HpW3zqa34eIOARYR9JE5ete5M2lxs++vYB9yMFvr9SwZLOWEhG9yRve25AVUmeQ544vN4Kckb3b9wZWI2/ivg58SB673pY0Zw1LNxskBzfNzKx2pQR9FTIANCAijgTulXRxZZ8xyHK9XchJjdcApwO3yoNwAIiIzYATgS+AI8g+d0H2SWpkuo5EBjc3IvuQ+ULvV4gytTsitgK2IPtrPgIcD9xOBnj2AiZWadBv1p1ExL3kBe/Wkt6oBPBmJEurlwFWknRFO/earQSDFwBuJbMy3yQDCy8CJ5DZri931I802myifEfKecDnlaBl47O2OXnM3QA4v/Lzrnfpq7khOQRlKUl317R8s5ZSbgJMSGaL/41sn3MYWYnybuX7aG5ySvocwDjAg8C+kp5u52O6tR4HN83MrKVExFzkoJbPgDuA4yQ9Wrb1JDMOVyCDnFMAk7kPWYqIhYGdgHnJUqNtJF1UtvUE0MBJxqNL+twXzMNO5DTencgWAKOSWWsj4ZJS66YiYiIyUPegpA2qx5PSf3Z24GYy037tGpfaMiLiNnKS957A88AMZE/N5YG7gGOAuyW9W9siW1A5vp5PBoFvl/RhZdvU5OcwyBLbWyV9ULaNTlZ+7ADM5PfV2l1EzAK8Jumr8nUvYGby+2Q94ClyUOetwCeVmwkTky1H+kv6uo61m/0cBzfNzKw2EbE98IKka5oe/z9y+vSi5InUZcAJkt4p23sD05IXKv92gG6gMrhjZTJjcB4yE2jnRh+y5nI9G3yVLKGZgT+R7+9L5EChWyv9YucE1if7xD4r6eDq82tZvNlwEhFPkxfAC5Wvm4dqXQFMACytMgG83VSOHeOQFQf3No4LlX2WIVtYzED2RT6dDOL5mMEPx9XryRtHl5IT5B+tBGh+S06an4I8Z7idbGOzKnk8PkPSDr7JZO0sIiYkq0vuALbUj4d0jkr2Ed+VPP++isy+f6LxfWbWyhzcNDOzWpSTqP+QvdkuBvaS9ELTPiuTQc5ZgNfIC5czmktgHDT6qZJRtSn5/k1IZgPtWz2RtcHXKL2KiHnIvoIzkBONJyi73EU22P+3pO/Kc364iHYA3rqbSsBuG3LY2wnA3xv9Zss+45DfF5MC87Xz90Dp17sBefPpGkknNh6vZEb1Jo/ZRwNHStq+puW2pNLCZgtgD2AA2YblEvImad+SWbYt+R6OWZ72NXABsFn5vPpYbG0rckL6RWT7i81LX/vJJT1btvcgS8//TAY5JwZOIr/XXvGNAWtlDm6amVktyoXe9MBS5DCh8ck+kYd2cCd5U2BtMpD0AHC2pKs6e81dTXmPZwW2Iy+oRwbWlHRprQvrwiLicbKn6UHAvWQZ5A5kU/6vgK08OMjaQSW4OQmZ3bMiGeQ/i7x4Hoc8bu8H7CjpxHbOmouItcnp8QBPA2tJ+m9lezXIOQHwWenn62BckxLE3JM8N3iGDAbfWMmeH40MznxBDkF5vgQ/2/bzZ1ZV6Ud7EtnP/hjg8kZv8FLlMxnZn31zoCewvaQza1qy2S9ycNPMzGpVMjFmI6d4bwR8RA5gOa96ERIR05AnWOsCT0n6Yw3L7ZIiYgTgD+T7uqukO2teUpcUEYuT5Y5/kXR+07bpyInRowFLSnquhiWa1SIixiWz6dYExiIn6wZZQnyDpJVrXF5LKL0fFwc2JttaXENmvD4o6YvKfg5mDqZSiv4P8ufbTeS05/skfVTrwsxaUESM3OiVGRF9SsB/E/IG+Djkzak
LyMzyRgXKiGQ/zv2AM31z3FqZg5tmZtYSyhTU+ckA5vLAPcBuzZNNI+IPwEeSHncWxpCJiJEkfeMy/qETESsC5wKrS7qm0VuQPJ/6PiKWJgMWm0o6rbaFmg0HldYM0wJLAGMD3wJXS3q+7DM3mTE3PnmxfDZwv6SPfLz+YbDbxGQm/Q7kzZDTyWzXZxsBBft5zT/DImJ14ACy/cFZ5OfucQ89MUsRsT5wBrCTpMOato1IDjXbmBx2di1wvqR7K/uM7O8na3UObpqZWUuJiEmBxci+WXORFyl7S3q91oVZW2qa/jw72RbhFEnblMd6AgNKee5UZObD6ZL2qm3RZsNYU8n0I8BvgO/JfoafAFcC+3c0MMg3U36qVCw0qhE2Bt4hS6uvlfRynWvrCkrZ+ffVYEvpHbg9sBNZAXI+cIw8Hd2sUXmyFdkK6k3gr5KubdpnOmCfss9rwL+BiyS91KmLNRtKPX55FzMzs+Gj9IT8EUlvkWUx65E9tRYHHouI3Uv/TbNOUTLNBkTE1BGxN/AycCGwVUTsDCCpOnl+RmB04NXy/J98vs26okpgc1dgcnJgy6jkxfIDZDn6/RGx5aCe264iYtyIWC0ilo6IRQAk9ZX0DJkttRTwX+BIMthpTcpNJCLitxFxGDnp+YWIODMiFo+IsSV9JWl/MvB+H7AzmVls1vYk3UwO4/oLOQzx6oi4ISKmrOzzoqS1gdXJHuLbAhdHxEI1LNlsiDlz08zMOl0jG64Ef6YkB1G8CbwEvNoYKFSCmb8B1iEv+vaQ9I9aFm1to2QFzQg8Vspw7yGHWS1A3hg+GlgFeJbsQ/UmMDvZM3YMSdPXsnCz4aApa3MP8pi9XaNPZESMRZZZr0Fm2z9ODoa7rpYFt4BKCf+qZK/jWYD+5I2Ph4CTG72PS3uLsckg552SXnffzYEa7QwiYkLgfjKo/gT5fs5ItkA4izw/+LDyvEklveV2CNbuOmjjMAewCdC4GXU42QaqX9PztiaDoQtK+ryTlms21BzcNDOzTlcJbu5M3hmeEBDwIXAecDU5FKDR0Hx8YG7g1jLd0WWONtxExALAKcDnlHJbMpPhynKRPS2ZqbY62Wi/4R5gF0n3NoIbnbx0s2GuMhV9NWBRYEJJK5bS6v6NwFH5vlgN2BD4TtIs9a26PpWfbyORNz4eA/4FvEJmfs8K/I8cQHa8pFerz6tp2S2r8vm7CJiJDKzfGhFjkj021ybPI54AVpT0Xn2rNWtNlZsEEwC7k8fqZ8kbBBMAn5HnLyc3Pa8xVd03CazlObhpZmadqnKCNRN50XcRcBrwJPAoWfL4Cjlk4VpJ/216vgObNlyVk/81gE3Ji+nXgDUkPdS032zkZOiZgLfJgPyHmHUzJXD5NPAB8D6wdKOXYePit7Lv74EvJD3ajhfElWDcwWRVwnqS7i/B4A/IGyZTkZngj5BBzqMlfVnboltcRExC3jz6N7BrNcOsDCPcDDgY2EbScfWs0qx1VY5L15DBzEMkXRwRE5N97tcD/khmlv9V0v01LtdsqDi4aWZmtYiIS4HxgM0lPRsRcwIPAnsAywD/R05svAm4VNL7tS3W2lJE7Ev2fW0EHQ4hgxCfVfbpSZbpvt8o0zXrbiKiN/BXMlg3Pxnw30XSJWV7AL2ayxrbVckqvJK8gbe3pM8i4gzy59pcZEn1VcAMZFn1qu1cxv9LyrCg54DrJW1WSvmjGjiPiGfIz+Wf2y2gbjY4ImJG8ph0EHBgtbqkDBM6AFi1PHQXsASZhe+AkXUJHihkZmadrkyVnpo8eWpMhj2GHBJwBFku8wywHHAsMG4NyzS7g5xkvB7ZKmFP4KGIWKMyLGgmMvtq+3qWaDZ8lYyf7yQdQh6bDyybToyISyNiHqV+EdGrxqW2mveAviWwORmZDX4M0KNkuj5NTknf3oHNX9SfLKFdMSIWlDSgVICMAD8E318i+3GOVOM6zVrZt8B3ZDuR/pF6QQ4TAtYn2ztcC7xYBp8
5sGldhk9AzMysDt8BzwP/Lb18fg/MSw6lkKR3I+IB4GKy1PcZl6NbZ5N0W+PPEfEgcCNZqn4+sEFEXEuWcQ0gSyLdNsG6nernWdJbwJ7ls78VmdnzfxFxHnCUpP/VtMyWIunTiNiO7GMH8HuyJP0xSV+WgMInZEDuVHDPzaqmIVbjSfogIvYHLgFOLhPTr5D0cXnKXOSNppvL++vjsNlPfUDeJNg0Iq6V9DjQv9IjfCTyfOZq4GwY2EqqrgWbDQlnbpqZWR3eITM0Ly1f/xZ4A3i5ZP+MBowITFMNMJnVRdLbks4mg5s7A5MARwG/Ifu8fVsuEHxBbV1eKfslIkaNiFkjYvny5x4Aku6XtC6wNfAUOdDl2pJBZ0AJ9DZ6kb5OHjP+r3w9B7A8MHIjcODA5o80Pn9/AW6PiK0l3QXsQPYLPAG4JCIOiIi9gTOBkYFdy/Pjpy9p1t4kfUVORp8EOLwc18eslKfPAIxGthj5tjzHgU3rMtxz08zMhrtKI/MewDiSPmjavjKZkbEGcA2wMHAScI6kPZ2FYa2kBHAmJnvG9ivZD2bdQmXo2/hkW5ClyWqvAcDxZEbPC42L34gYHdgOeFXS2e2YgdiUadgDGFXS55XtEwPnADOTgc6JyCyp30p63dlRA1U+f1OTLT8uAw6T9EzZPjrZGmFZYByyFP0y4CRJN/u9NPt5EbEJGeTsD1xHlqL3BTYnbx5MKukbn3tbV+PgppmZDXeNi92I2ApYBThX0mmV7dORZTCTkL02pyHLZ2YuQVGfYJmZdYLKzagrgLmBc4FbyLYhmwNvAccBFwLvVCelV5/fuauuT+XnWx+yT/RmZNDtW+Bk4CpJH5eJ8zuTP99eJwflXedgXMfK0MGpgHXK0MEeZL/S/mX7pGSAuL+kV2tcqlmX0HQTZmpygOfqQB8yW/pO4AhJV1ZK1c26DAc3zcxsuKpkYcwF3Er2KzxQ0ttN+41KnmgtRl5IXybpYZ9gmZl1jkqgbkHgBuCvkk4v224ig3YvkBfEt5OZnHc0Z+O3k8p7djSwIdl25V1gLGAW4HHyfbyr7D92pVdk2wWDB0dETESeL9wJbKUfT0UfobSv6QHM2MjoNLPB0xTknBiYHPgUeEPS13WuzezXcHDTzMw6RUT8hyxr3FzSi00nVz9krkREH0l961yrmVk7i4ijgDmBLSU9FRF/IifoLgM8CZxIlgUD/EPSHvWstF6VwOYcwEPAfmQJ9dcRMQPZYmU7YHpg49K3135BRPQEXgRulbRpRAT8eLhVRGxL9npdXtLT9azUrLUMbluQ8j0V7dZCxLo3DxQyM7PhrlzkzQDcB7wCAy9SSpDz+4gYLyJmd2DTzKw+ETES8A1Zcv5UeXh/4Arg/pJ1fyjwIFmCfXJ5XtsNcakEBtYG/gdc3sh8kvQ8OehmGzKbc4uIGKWOdXYl5XMkMoi+UkQsoKIEPRuf0ZHKfh/Wt1qz1hARS5bkgMEKVpZvKQc2rVtxcNPMzDrDZ+SFyBfNvcUqmRjrAodGxCSdvTgzs3ZWDUxK+obss3lM2TYbOWTiLkmflt0mIvu03S3pjfK8di4H+4R8j14BiIheJYOqv6TbyJYr85EZnPYzKkGX44ARgcMiYuVSzt84f1gM2Aq4XtJ7pUTdrC2VNg7XA5tFRK9f8TpRfh95WK3NrDP5B4GZmQ0zEdE7IkbsYNMXwPPAumV4UGP/xonUaMCkwNhk3x8zM+s8jWPxqhExhqSnJN1dtr1D3pyatOwzMTAXebxu60EulUDC42Swd7eIGKkENQdExAhl+9fAV8CENSyzS5J0M5kZPCNwOnBqRBxUWiacRr6fO9W4RLNWsSfwHHCzpP7lXHyIjzUlO3pm4OFScWXWpTi4aWZmw9I5wEkRMUOjfAxA0lfABcDMwO4RMVNE9K5k+iwGrAVcIekrZ2GYmXWeEogbDzgbOKjccGr0PvyYHC60bUScB1wE/I2cqtu3eqxvF5V/89oRsRzwBHA
zsAMZ4JwVoAy+GQeYnew5/UAd6+2qJJ0HzAqcB/wO2BLYHPg3sG7j8+fyWmtXEdGbvPn0JdAY1HkkQ98G45/kza62HRJnXZcHCpmZ2TAREX2AvYFNgb7AYcAFkt6t7LMTeYf5U3Jq+vtkeeO6wPuSflP28/RYM7NOFBGjAoeTE793lHRUZdv4wBbklPR+wPmSDi7b2vJ4HRETkFmt+0vauwQxzwaWAh4F7ib7Rv4Z+AM5aGjf6gA9G3wRMS4wDvCRJPfZNCsiYnPgeOBo4EZy+NtmwNmSvhuM50fJ2lyqPHc5SdcMzzWbDQ8ObpqZ2TBTSvBmArYnByw8Rt4Fvl3SJ6VkfTHy4nlp8m7zd8AZwEmSHo+IXpL61/IPMDNrcxFxNPAXYDfg2MbFcePYHBGjS/q8PDZYk3m7o4j4P3Kw0g6S7q88vgawO9lfcwRy0NApkvYr29syGGxmw0c5996VPPceBXgZ+JOk1xvtnwbnmBMRTwPPAOt4uKd1RQ5umpnZMFeakS9MXhwvDFwOHCLpgbK9NzA62UvrjcZACjMzq0cleDktcAJZDryGpDtqXlrLaGRdRsRUwMrALsDskt4uk4r7Vvadlxw09EWjgqGdg8FmNnxFxDFk24aeZJ/7nSVdXbZ1eFOlkrX5N2B/4HeSHuvEZZsNMw5umpnZcFPKyFYkLwAnAE4iS2ded1memVl9fi6DsJRYXwtMDaxdhrtYERGPAHOSLVZ2k3RSZVvvwSkFNTMbliLiRvKY9CCwBjA32S95Z0lPlX1+ctwvx/vngVOA3X0DxroqBzfNzGy4KtNkpwA2IYcBfAwcDFwm6f0612Zm1o4q2TojA/8HvAL0lfR2ZZ9ZgQuBz4CNJD3vkupUJgofQvbXBNgHOEfSq2V7D7IStO3fKzPrPBExVmkDNRewPLAR2dv+OGBvSZ928JwTy76/lfRWZ67XbFhycNPMzIab6oVwRIxEljnuCKxATpfdU9KN9a3QzKx9RcT+ZH/I94H3gNeBW8ufbyLbilxcHltH0sc1LbUlRcSiZNBgRuAW4HTgekmf1bowM+v2BpGF+UN7jDItfQFyENya5I2qIyQdUtl/VDLh4C7gIt+Qsa7MwU0zM/vVKn3IxgCWIac0vgM8DNwp6aHKvmMAvwOOBI6UdEwNSzYza3tlKE4f8gJ4DmByYDbge3IwxW3ADMCkwAqSrqpnpfVq9MosE9JnJt+jp4CnJb0XEX8hJ833As4hM15vc3mnmQ0vlQz8uYGVgPmAt4AXgIslvVT2mwD4PbAtMIWkyZpeZyyyN7CHeVqX5uCmmZkNMxFxNpmV+TrwDTAL8DhwDXCBpNfKfj2BsSV9UL52qaOZWY0iog/QAxgNWAQYC/gDGcx7WtLq9a2uPpWbd5MBFwG/Jd+nIAMJ5wH/AvoCxwAbAgOAhapT1M3MhpXKALj5yGPQxMBz5A2qUcnz8BOBEyR9W54zPfC9pJcbz69p+WbDhYObZmb2q1Qu/JYEriYnpJ8s6YuIeIrst9kLuIfMaLmyo54/ZmY2fFWndUdEb/Im07u/8JwJgW8lfdo43nfGWltNRFxHBnqPA+4jg7+7kFmv95Bl+6+XKel/k7RWbYs1s7YQEY8BnwAHSLqt9LnfAvgLef69r6RD61yjWWdxcNPMzIaJiLge+ArYUdJrEbEKcD45cOH/gP3Iu8ovAvtJeqS2xZqZtZmmHsh/JcsYpyKPy0cB91V7apYM+wHOqoeImAL4D3CopOObtq0LnAA8Bqwo6cPKtrYNBpvZ8BURCwPXAttJOq1p2yRkRudswBI+57Z20KPuBZiZWddXLvzGAd5slJ4D/yRL+O6WdABwGTABOaDigzrWaWbWxnoARMTfyeneo5DH5TGBK4BjI2KBiBgRQNL3Dmz+4Auyh/S7kIHikiGFpHOAnYEFgT9Wn+TAppkNR73Kr6/hh+NSj3JT5X9kj80xgcXrW6JZ53Fw08zMhoX3yIvjqwEiYgVgfOBMcjA
FwBvA0cA0kt6ICP8MMjPrBKUc/fuIGJ9sHXIa8EdJfwMeJPtFLgxcD+weETOWzM22FxGbAR8CSwOLR8RIAKXfXePn2E3kJOI5almkmbWjV4DPgTUjYmylAWTPX4CPyz4TR0TUtUizzuILSzMz+9VKs/JDgNvLQ2OSF8tflgvAsctjCwKflud4iqyZWSeoHG+3BF4lJ+l+GhGTA5sBfyeHwb0B7E5OSV+/hqW2omfJoXifA2sBazUyWivv6wTkddXntazQzNpKCVa+SfayXxbYo0w9p5JxPyswHjkQTg5wWnfXq+4FmJlZ9yCpX+XL58hpjWuUCbzzAqsAu5QTrB4ObpqZdZ5yLB4deBl4vjz8L+AZ4BJJ70TEAcAewP/IIRVtT9JdZWjHKsDmwCkRsRpwKPARGTz4KxnYPAx+3N/UzGxYK8cXRcQuQABbAytFxDHAW8CUwHrAe5JOqm2hZp3IA4XMzGyYK3eH/wIcD3xJ3ky7SdIKda7LzKwdRMT/Aa9Ieq/p8TWAcSQdV3ol3wscARxZsuyXJ/txrizplc5ed6uLiImATYFNgEnIXpzvA2eQA5luj4hekvrXuEwzawONmyil3chSZGb574HeQH+yXcZ+kh70ccnagYObZmY23ETEGMCqZBnkk5I+8PRYM7Php/TKfIHMKNwJOK16UVu5IJ4MuAc4VdJ+JbNzQ3I4zu8kvVXD8lteuXk3G5kptTgwIbCppHPL9h5kYpUvssys00TExMCIwJxk9ubTkr6sd1VmncfBTTMzMzOzbqL00ZwdWA1YE3gM+Lukm8v2RnBzDOA+MstnR2AuYCPgP5LWdWn1z4uI3mRwc9vy++PAjpJuq3NdZtZefKw2Sx4oZGZmZmbWDUTErMCxwFNkwHJtsjzxxoi4OCKmrwzD+QxYgxxKcRk5SOgZYKvGy3Xy8rsUSd9JupasTtgcGAG4JSJubExUNzMb3gYnsFkyys26NWdumpmZmZl1AxFxCzA2sJKk18pjY5MZmVsB45M9Ng8t09IDmAgYlxwC919JX7h9yJArZf67AGNJWrvu9ZiZAUTESJK+qXsdZsObg5tmZmZmZl1cREwN3EH20Ny3PLYPcBbZf212YF0y0PkRsI+kM2tZbDdVsqN6Sfqu7rWYWfuqtB/5PbAHsIGk/9W9LrPhyenJZmZmZmZd39vA/4ClI2KKMvl8L2BOSf0kPVy+XoXsw3l6RNwVEQvVt+TuRdIABzbNbGg0SscjYvaIWO7XvFalVP0Y4EvyhpZZt+bgppmZmZlZFyfpW+Bs4LfAjcBpwE3A3ZV9PpN0I1mivgHZJ/LOiNi+0xdsZmY/kDSg/PESYOeIGKe6vRL8/NkYTmk3QkRsDkwDHFR+Pph1aw5umpmZmZl1A5KOByYB+gBjAhMAf4yICZr2exs4jwxw7gNcAQMvis3MrPNERM/y+wbApMA/JH3UtL13RIzQCIIOKshZytHHAPYHTgYeHs7LN2sJ7rlpZmZmZtbFNXqslT+/CTwLTAdMTGYCnQ3cJ+mLpueNIKlf9flmZtb5IuJD4DJgd0kfRMT0wLLApsDHwBvApZL+PYjnN3ptHkm2IFmoMVzOrLtzcNPMzMzMrJuIiAmBpYCbgU+BnYDtga+B04FLgSck9a9rjWZmliKih6QBEXEIsCawlKT/lm2PADMB/wX6AbMCA4BzgQMkvd/B680EPEEe94/zTStrFw5umpmZmZl1YxExHbAvsAbwFHAicIukF2pdmJmZERFjAa8C3wFrSro1Iv4BbE72SL60ZNjPRx7L/whsLunUDl7rBrItyZ8kfdpJ/wSz2jm4aWZmZmbWDUTENMAUwFfAJ83By4j4A3lhvABwkqQtOn+VZmZWFRGTAX8BlgdmAS4G/kQer4+X1Dciekr6vvRGvhuYDJhV0udNr3MzsJOkqzv732FWJwc3zczMzMy6qIjoJal/RCwOHEv22ewHPEZOSz9F0puV/XuTF9EPS7qvURJZx9rNzCx
FRC9gXjLDfmXgG2AjSXdW9ulTAp3HACsBczaXpkfEiEA/Sd933urN6ufgppmZmZlZFxcRLwMfAocBIwPrkYHOt8hhQqdL6lvfCs3MrCNNA+HGABYH5gQOaS4tL5PTDyd7Ky8m6a3qNgc1rV05uGlmZmZm1oVFxBzABcDWkm4tj40IbASsD0wCPAqcKumqutZpZmY/Vg1sNj0+lqRPmrdHxNzkQKGHJa07qOebtZsedS/AzMzMzMyGTET0KL/3AcYiJ+h+XB4bUdK3ko4HViEvhKcCLoyIFepZsZmZNatkbEb1d0mfNLZXtk0NbAZMQE5DB4jOXrNZK+pV9wLMzMzMzGzIVPpkHkoOoRA5IRdJ30bECMCA0m9z14i4ClgLuLKG5ZqZGQNLxyNiY2Bs4GJJr/9c9mUJcPYBbgDGB3aV9IHL0M0Gclm6mZmZmVkXFRHbAtsCUwMfkNk8FzYueCOit6Tvmp7jIUJmZp2sUUIeEVMBLwNfkYPfzgFub/TX/JlS9YWAqSWd/XP7mbUjBzfNzMzMzLqwMoBiJ2AL4HvgWrK/5j1lewA9JfWvb5VmZgYQEQcAW5OZmEuRmffnABcCD0rqV/b74UZUB703Hdg0q3Bw08zMzMysCxlU5mVEzA7sTU7afQe4hJyS/nInL9HMzDoQEeMDlwKTSJomIsYCDgM2AF4BTgOukPRs5TlTAWMAL0j6uvNXbdb6PFDIzMzMzKwLqAyaGBARPSNi0YiYJyLmiIjRJD0haSVgPeBTYCvghoj4Q43LNjOzgb4A/gfcCjk4SNJGwPzl8QOBUyJio4gYNyJ6kVmeNwOj1bRms5bnzE0zMzMzsy6gMohiFbLP5kJl07vAf8hem1dX9t8J2ByYV9JHnb5gMzPrUESMWIa/NW5aNaamr00GOCcBLgaeBv4G3CppTfdMNuuYg5tmZmZmZi2ucUEbERORF7vPAteRZYwrlF/vk9PTT5X0VXneyJK+9lRdM7P6RUQfSX1/rodmRIwCbAfsCowMfA1MKOlLBzfNOubgppmZmZlZFxERpwELAGtJeqzy+P8BxwPTAatIuiEienmIkJlZvYZk+E/TEKG/AkcA20g6zsd0s0FzcNPMzMzMrAsoU9GvAL6RtHR5bASgvyRFxHjA3cAnwMKNibtmZtb5GkHNiBgVmJvsg/wqcCdw488FKiNiBuAkYApJU1VfrxOWbtbleKCQmZmZmVkXIOkzclDQ9BExdkT0AAaUi+cRJH1ABjcnAMZu9HIzM7POVwlEHghcAPwe2Bg4HVikef9yTG+YCpiVHCZEydp0YNNsEBzcNDMzMzNrcZWL3nuBqYGtJQ0oA4Z6SeoXEb2Bj4AAevhC2MysHo1jdkSsBmxAtg2ZE5iLPEZvHBEjRcQYETFhycr8oZempBuAFSRdW752ObrZz3BZupmZmZlZF1L6bm4I3EYOnXiqZG8uBxwJ3CFpQ5cwmpnVKyKeAR4GdpT0fkT0AU4F5gFuBZYC+gKPAUdJerB5AJyP5Wa/rFfdCzAzMzMzs5+qTEifApgYmAy4Gtge+JAsb3wCeDIiRJYxvg5s23gJwBfEZmY1iIhVgN7A1ZLeByiT0mcERiGP47sDqwFrAhNFxAqSPq++jgObZr/MwU0zMzMzsxbTyNyJiMmBq4CZyEBlX+AAMkPzGmBxYD5gNOBg4BpJXzRn/piZWecpPY/HIluFPF55fAtyuNDywLWlFP3CiNgaOJosYT+6s9dr1tW5LN3MzMzMrEVFxHXA9MBxZJbP78gMn/eAvwNXlEygkSV9Xd9KzcysWUT8TtKdla/3AcYBdpH0dUSMKOnbiBgXeAk4WtJeLkU3GzLO3DQzMzMza0ERMSkwJXCopBPLY9cAFwFbktN374+IPSTdVttCzcwMgIiYUtJrja+rgc3y9T4R0VvSd+WhvuX3iYH3KTEaBzbNhoynpZuZmZmZtabPySET78IPQyU+kXQLsBWwLlmqfktE7FjfMs3MrHgwIp6
OiLkbD0REz1KmDkAjsNnIzoyIEcj2IlOQbUh+mLZuZoPH3zBmZmZmZi0mIjYDPgWWAeYpF8YRET0BJL1DZm5uSA6kuKQ8Lzp8QTMzG64iYkzgCOA74KGIOC8ixpb0fQli/qhytpKdOT+wNXC9pPtL0HNApy7erItzz00zMzMzsxYTEQsDOwHzAj2BrSVdVLb1AGhc/EZEL0n93aPNzKxe5QbTnOQE9HWBcYE9JB1S2eeHgW8RsShwJvA1sJikdzwQzmzIObhpZmZmZtaCImJUYGWyBH0e4HpgZ0lPl+2+ADYzaxHVG0wRMRrwZ2BHYA7gRfL4fWXZ3hPoDSwMrA1cKOl6H9fNho6Dm2ZmZmZmLSwiJgI2BTYDJgSOAfaV9Gmd6zIzsx9rBCcjYlsye3Nk4HtgUmAC4E5gC0nPNvYHekjqV752Br7ZUHBw08zMzMysxZVSx1mB7chszpGBNSVdWuvCzMwMyJYhkgZExLzAvcDewMmSPoiIucgeyluSQc7Dgb0kfV3fis26Dwc3zczMzMy6iDJV9w/AXsCuku6seUlmZlYREacDfwSWbGRolsd7AcsD5wMjlIfXbPRTNrOh1+uXdzEzMzMzs1ZQShdviIg7JH3jEkYzs5bzDjAW8Ar8ENSUpP7AvyNie7LVyGvAc3Ut0qw76VH3AszMzMzMbMhI+qb87sCmmVlreQAYBdgnIkaT1L/04Wxkawr4DNhe0hOl7YiZ/QrO3DQzMzMzMzMzGzZuB24geyQTERcBT0jqFxHjkMOFJiIzPH2TymwYcM9NMzMzMzMzM7NhJCLGBU4HlgWeAu4qvy9ZHttH0gGN6er1rdSse3Bw08zMzMzMzMzsV4iIyYCRyWTMF8pjqwB7ANMDI5LZmudJ2rlsd99ks2HAwU0zMzMzMzMzsyHQyLqMiJmAbYDNgP7A18BtwN8rQc65gU+Ab4B3JSkiekgaUNPyzboVBzfNzMzMzMzMzIZCRDwATAxcQWZmzgksBowB7C3pwPpWZ9YePFDIzMzMzMzMzGwwNbIuI2JDYGZgHUlXlm2jAIsAWwP7RsSnko6rb7Vm3Z8zN83MzMzMzMzMfkajP2a1nDwiTiSzNBeV9L+IGEFSv7JtOuAiYCzgt5I+rG3xZt1cj7oXYGZmZmZmZmbWiiIiyh9HiIiRmvpkfgRMCXwOIKlfRPQs/ThfBI4CJgfG7sw1m7UbBzfNzMzMzMzMzDpQmWa+K3BKRIxa2Xwz2e7vqIiYvOz/vaTvy/ZewJfABJ21XrN25OCmmZmZmZmZmdkgREQjdrI8sFVl00PA+cAGwD8iYtGIGL88ZzZgOeBtSXd14nLN2o4HCpmZmZmZmZmZDUIpRd8vIj4ADomICYF9JH0GrBMR/wV2BpYBnoiIr8mp6X2AVQEiopek/vX8C8y6Nw8UMjMzMzMzMzMbhMp09F7AdsCGwDGSTijb+wAzAusBKwBfAU8AZ0u6uZ5Vm7UPBzfNzMzMzMzMzAZDCWQeBGwL7CDpyKbtvcmMza8qU9VDDr6YDTcuSzczMzMzMzMz+xml72YPSX2B7SLivfJ7ACcA3wE9JX1X/vwDBzbNhi8PFDIzMzMzMzMzG4TSL3OApP6lNB3gDOAGYHtg8bK9Xwl2mlknclm6mZmZmZmZmRkDS8gjYnayf+bMZA/Np4GTJH3ZtP/JwPrAXyWd2NnrNTMHN83MzMzMzMzMqoOD5gMuAUYFXgXGBCYG+gHHACdIeqs8Z3RgP2BhYG9J17jHplnncnDTzMzMzMzMzKyIiIeB94D9Jd0fEZMDvwX+DKxStu0g6Yqy/5Rk3825gDUk/aeOdZu1K/fcNDMzMzMzMzMDImI2YDLgOkn3A0h6Q9K/gR2BtYA3gMsi4oiIGFnSa5KWAv4DHBURs9a1frN25GnpZmZmZmZmZmbpdSCAMeBHU9L7S/oQuCoiXgF2ADYBngDOLM/dB1gM+LRzl2zW3lyWbmZmZmZmZmZtr0w67wlcCcwNrCL
p7sq2kDSgfD0OcAEwCzCnpPfL470lfVfH+s3alcvSzczMzMzMzKztKfUnBwR9DhwTEatExKhl24CI6FkCmB8Bp5IZnmNXXsOBTbNO5uCmmZmZmZmZmbW1kpkJgKQHgL+TvTdPAnaNiLkiopek7ysBzJ5Af3KaupnVxGXpZmZmZmZmZtZ2IiJUgiIRMQrwHTCSpM/LY+MCxwKrAc+R5ep3ArcCywF/BXpLmq+G5ZtZ4eCmmZmZmZmZmbWtiFgP2AaYFLgG2FfSW5XtfwD2BeYB+pFVsCMCjwPrS3oqInpK+r6z125mDm6amZmZmZmZWZtpBCMjYhMycPkhOfl8BeBGSat28JyFgd+Q5ejvArdJ+rCaAWpmnc/BTTMzMzMzMzNrOxExEvAWOfX8EEmvR8ROwJrAH4Dfk4OFPgGeaExKN7PW0qvuBZiZmZmZmZmZ1WBD4GPgXEmvl8fuAjYBHgPGBUYGXgBOiYizSqZmDwc6zVqHp6WbmZmZmZmZWTsaARiFzMxs+CMwJXAhmb25LPAFcCCwIoADm2atxZmbZmZmZmZmZtaOPgUmBP4QEb2BGYCdgcPJoULfAkTEHeSgoYMj4lJJnwzi9cysBg5umpmZmZmZmVk7OgdYCjgaENlfswdwXyWw2UvSVxHxIDArMB4/zvQ0s5o5uGlmZmZmZmZmbSUiRpDULyK2Ay4jhwfdC/wWGLXs00dS34joQwY/vyNL1M2shTi4aWZmZmZmZmbdXnUQkKR+5fd3gIvLLyKiF3BcRLws6YHy1MWAVYGrJb3jgUJmrSUk1b0GMzMzMzMzM7PhLiJGIocEvQu8D7wsqX9l+8jATcACwPVAX2A+4D1gAUnfOrhp1lo8Ld3MzMzMzMzMuq2I6FF+Xx+4HbgIuAM4H1ipsl9I+hrYDTgDmBr4M3AV8JcS2OzpwKZZa3HmppmZmZmZmZl1SyVgqYiYC7gReAj4N9k/cxtgRmA5Sbc39i3PGxUYBRgg6YOalm9mg8HBTTMzMzMzMzPr1iLiZnIo0FaSXiyPzUSWoD8oaeXyWADIwRKzLsNl6WZmZmZmZmbWbUXE3MBUwDXAq+WxkPQscCKwVETM29i/ZHqOEhE9a1mwmQ0RBzfNzMzMzMzMrDubmOyf+Zyk/o3szOJyYACwJPwQ2OwDrAYcFRG9O321ZjZEHNw0MzMzMzMzs+7sv8B1wCuQAcxG2bmkZ8henMtX9p8VOBjoK+m7pmCombUYBzfNzMzMzMzMrFuJiNEbf5b0mqRlKSXpHbgOmCsipi2DhDYAegE7DfeFmtmv5uCmmZmZmZmZmXU3N0fENRExTeMBSd8PYt97ydL0ZYH5gc2A3SQNiIheHi5k1to8Ld3MzMzMzMzMuo2IGA3YAVgPGB84DDhM0uc/85y7gd7AW8CMkmbujLWa2a/n4KaZmZmZmZmZdSsRMSLZO3NDYG3gE2AX4N8dZXBGxFbAMeXLhSXdU7I2+3fWms1s6Lgs3czMzMzMzMy6FUnfSnoY2J0Mbn4DXAhcHhHzN/arDAu6v/x+eQlshgObZl2DMzfNzMzMzMzMrFuJiBEk9YuIFYBNgcmBCYERgNGAM4B9Jb1Zec6UwAeSvoqInj/To9PMWogzN83MzMzMzMys24iIHiWwORFwHvAmsDIwKbAccDCwIvBoRGxVJqQ3pqp/Vf7swKZZF+HMTTMzMzMzMzPrdiLiIGAjYElJj1UeHx1YCjibzOR8A9hK0rW1LNTMfpVedS/AzMzMzMzMzGw4+BwYFfgQICL6SOpbpqZfFBETAJsBX5FBTjPrglyWbmZmZmZmZmbd0fPAiMBaAJL6QgY5y/YPgf7ARpKuqGOBZvbrOXPTzMzMzMzMzLodSf+OiGuAvSJiDOAMSS9K6hsRIwLjACMDH9W6UDP7Vdxz08zMzMzMzMy6lYjoTVarjgmcDywC3A/cBvwHWB5YG7hG0voREXKAxKxLcnDTzMzMzMzMzLq0iOgp6fuImAdYHViMzMq
8EjgLWBrYGpiY7K85ALgKWFfSV2XC+oB6Vm9mv4aDm2ZmZmZmZmbWZVUCm1MCdwCjA08AvYGZyQzOvYCTgVmBAL4GnpPUr/H8WhZvZr+ag5tmZmZmZmZm1uVFxLXABMDOkm6LiLGB2cjy802Aa4F1JH1W4zLNbBjzQCEzMzMzMzMz69IiYhpgbjI78y4ASR8Dd0TE88CnwI7An4CLalqmmQ0HPepegJmZmZmZmZnZr/Q50BPoIakfZLk6gKR3Je0MvAWsVN8SzWx4cHDTzMzMzMzMzLq6L4HngI0i4rcApQ9nT4CIGA14BRg9IvrUt0wzG9Yc3DQzMzMzMzOzLk3SN8BRwLjA4RGxYkSMXRkUNDswFfCkpL4REXWt1cyGLQ8UMjMzMzMzM7NuISLWI4OcAdwI/BcYAKwPjAxMW4KbPSQNqG+lZjasOLhpZmZmZmZmZt1GREwK7A6sBYxEDlO+FDhB0n8iopek/nWu0cyGHQc3zczMzMzMzKzbiYjxgcmAj4HXnalp1j05uGlmZmZmZmZmZmZdkgcKmZmZmZmZmZmZWZfk4KaZmZmZmZmZmZl1SQ5umpmZmZmZmZmZWZfk4KaZmZmZmZmZmZl1SQ5umpmZmZmZmZmZWZfk4KaZmZmZmZmZmZl1SQ5umpmZmZmZmZmZWZfk4KaZmZmZmZmZmZl1SQ5umpmZmdkwERGvRcSZQ/nc2yPi9mG7osH+u3tFxL8i4s2IGBARV9SxDjMzMzMbcr3qXoCZmZmZdY6IWABYAjhS0qc1L6eVbATsBBwJPAq8MTz+kohYCxhf0pHD4/XNzMzM2lFIqnsNZmZmZtYJImJH4BBgKkmvDYfX7wMMkNRvKJ7bG0DSd8N6XYPxd18ILCRp0uH891wDzCppyuH595iZmZm1E5elm5mZmdlPRESPiBhxSJ4jqe/QBDbLc7+rI7BZjA98WtPf/atFxMh1r8HMzMysLg5umpmZmbWBiNiHzNoEeDUiVH5NWbYrIo6NiLUj4mmgL/Cnsm3HiLg3Ij6KiG8i4pGIWKWDv+NHPTcjYoPyugtGxOER8UFEfBURl0fEeE3P/VHPzYhYpDx3tYjYPSLeiohvI+LWiJi2g797q4h4pazvwYhY+Jf6eEbElBEhYFFglsp7skjZ3iMi/hYRT5e/+72IOCkixmp6neUj4tqIeDsi+kbEyxGxZ0T0rP77gGWAKSp/z2tN79OUTa+7SHU9lffpqYiYOyLujIivgX+UbX0iYt+IeKms483SS7RP0+suHhF3R8SnEfFlRDwfEf8Y1PtkZmZm1srcc9PMzMysPVwGTA+sCWwHfFge/6Cyz2LAasCxZftr5fG/AlcB5wG9gTWASyJiWUnXDsbffQzwCbAvMCXwt/J3rD4Yz90VGAAcCowB7FzWMV9jh4jYorzeXcAR5e+4ovydb/3Ma38ArAvsDowK7FYef7b8fhKwAXAGcDQwFbA1MGdELFjJUt0A+BI4vPy+GLAfMDrZyxPgwLL+Scn3n7Lv0BgHuB64EDgXeC8iepD/RwsBJ5d/w2zl75oeWAEgImYBrgGeBPYig9jTAgsO5VrMzMzMauXgppmZmVkbkPRkRDxKBjevGETPzRmA2SQ90/T49JK+aXwREceSg3e2BwYnuPkRsIRKs/cSiNs2IsaQ9NkvPHdEYI5GyXpEfAIcFRGzSnqq9OrcH3gIWExS/7Lfk8CZ/ExwU9JXwLkRsQnwvaRzK//GhYBNgLUlnV95/D/ADcCqQOPxtarvD3BiRJwIbBkRe5Ry/Zsj4n/AWNW/ZyhNCGwu6aTKutYB/gj8XtLdlcefKutZQNK9wOJkgHopSR9iZmZm1sW5LN3MzMzMGu7oILBJU2BzLDID8S5grsF83ZMbgc3iLqAnMMVgPPeMpl6cd5Xfpy6/z0NmMp7SCGwW55GZm0NrVeAz4OaIGLfxC3iEzLhctLFj0/szWtnvLmBkYMZfsYZB6Utmkzav91nguab13la2N9b7afl
9+RJkNjMzM+vSnLlpZmZmZg2vdvRgRCwL7AHMAVT7N6qj/TvwRtPXjaDjWM07DsVzGwHSl6o7Serf6Gk5lKYjg7jvD2L7+I0/lFLvA8hy9NGb9hvjV6xhUP7XwfCl6YCZ+HGbgarGei8iM1JPBQ6KiFvJlgWXShowHNZqZmZmNlw5uGlmZmZmDd80PxARC5O9HO8EtgTeAfoBGwJrDebrfj+Ix2M4P/fX6EEGNtcexPYPACJiTOAO4HOyh+XLwLdkVuvBDF6l1KCCxD0H8fhP/p/K3/NfslVAR96EzDKNiN+RmZzLkEOjVgdui4glJA3q/TYzMzNrSQ5umpmZmbWPwc20rFqZDNYtKalv48GI2HCYrerXeb38Pi3wn8aDEdGLHCz05FC+7stkD8t7mvppNluELItfSdKdlb9/qg72HdT738hGHbPp8cEp2294GZgduLWpBcBPF5EZmreWX9tHxN/JgUeLArcMwd9pZmZmVjv32TEzMzNrH1+V38ccgud8TwblfsgijIgpKdO3W8DD5MCiTUtAs2FtBq/sfVAuJv/NezZviIheJWMTBmaWRmV7bzLLtdlXdFym/nL5/XeV1+gJbDaE650E2LSD9Y4UEaOUP4/dwXMfL7/36WCbmZmZWUtz5qaZmZlZ+3ik/H5gRFxIlpdfXaaGD8q1ZKnzDRFxPtm7cSuyx+VvhudiB4ek7yJiH+AYsrT6YjJjcwMyaDg02apIuiMiTgJ2i4g5gJvI92s6cnjPX4FLgXvJzMuzIuLo8vetS8dl848Aq0fE4eR09y8lXS3p6Yi4H/hnCT5+DKzBkJ2rnwOsRk5GXxS4hwzOzlgeX5IMBO9VytKvJbNexycDsW8Bd3fwumZmZmYtzcFNMzMzszYh6aGI2BPYnOy12AOYioEZnR0957aI2BjYFTiSHDq0CxlArD24CSDp2IgIYAfgUOAJYDngaLKkfmhfd/OIeAT4C/APoD/wGnAuGTxE0kdl4NJh5FChT8r2W4Ebm17yeHIo04bAdmRw8eqybW3gJPJ9/hQ4jSyzv3kw1zogIlYor7sesCLwNfAKcBTwQtn1KvL/biNgXOBDsmfo3pI+G5y/y8zMzKyVxC+05DEzMzMz63Iiogc59OcyST8p1TYzMzOz7sE9N83MzMysS4uIEUvmZtV6wNjA7Z2/IjMzMzPrLM7cNDMzM7MuLSIWAY4ALiGHC80FbAw8C8wt6bvaFmdmZmZmw5V7bpqZmZlZV/ca8CawLZmt+TFwNrCrA5tmZmZm3ZszN83MzMzMzMzMzKxLcs9NMzMzMzMzMzMz65Ic3DQzMzMzMzMzM7MuycFNMzMzMzMzMzMz65Ic3DQzMzMzMzMzM7MuycFNMzMzMzMzMzMz65Ic3DQzMzMzMzMzM7MuycFNMzMzMzMzMzMz65Ic3DQzMzMzMzMzM7Mu6f8Bxy+b9Yq3IHoAAAAASUVORK5CYII=", "text/plain": [ "
" ] @@ -649,17 +659,67 @@ { "cell_type": "markdown", "id": "656e14dd", - "metadata": {}, + "metadata": { + "editable": true, + "slideshow": { + "slide_type": "" + }, + "tags": [] + }, "source": [ "## Speeding up influences for big models" ] }, { "cell_type": "markdown", - "id": "3bf8c4dd", + "id": "070db55d-c448-4ca5-ab3d-3c38f196f60a", + "metadata": { + "editable": true, + "slideshow": { + "slide_type": "" + }, + "tags": [] + }, + "source": [ + "The explicit calculation of the Hessian matrix is numerically challenging, and due to the high memory need infeasible for larger models. \n", + "PyDVL allows to use several approximation methods for the action of the inverse Hessian matrix to overcome this bottleneck:\n", + "\n", + "* Iteration-based:\n", + " * Conjugate Gradients (Cg)\n", + " * Linear time Stochastic Second-Order Approximation (LiSSA)\n", + "* Low-rank Approximations:\n", + " * Arnoldi\n", + " * Nyström Sketch-and-Solve (Nyström)\n", + "* Factorization-based:\n", + " * Eigenvalue-corrected Kronecker Factorization (EKFAC)\n", + "\n", + "In the following, we show the usage of these approximation methods and investigate their performance. " + ] + }, + { + "cell_type": "markdown", + "id": "f74bba05-4a43-471f-b90c-745d0b0442f4", + "metadata": { + "editable": true, + "slideshow": { + "slide_type": "" + }, + "tags": [] + }, + "source": [ + "### Cg" + ] + }, + { + "cell_type": "markdown", + "id": "5bc615e637c1ee40", "metadata": {}, "source": [ - "The calculation of the Hessian matrix (necessary to calculate the influences) can be quite numerically challenging, but there are some techniques to speed up its calculation. PyDVL allows to use the full method (\"direct\") or the conjugate gradient method (\"cg\"). The first one should be used only for very small networks (like our current example), while for bigger ones \"cg\" is advisable." 
+ "Since the Hessian is symmetric and positive definite (at least after applying a sufficient regularization), we can utilize the [Conjugate Gradients Algorithm](https://en.wikipedia.org/wiki/Conjugate_gradient_method) to approximately solve the equations\n", + "\n", + "$$ (H + \\lambda \\operatorname{I}) x = b$$\n", + "\n", + "Most importantly, the algorithm do not require the computation of the full Hessian matrix, but only requires the implementation of Hessian vector products. pyDVL implements a stable block variant of preconditioned conjugate gradients algorithm.\n" ] }, { @@ -667,6 +727,7 @@ "execution_count": 20, "id": "efdb4050", "metadata": { + "editable": true, "slideshow": { "slide_type": "" }, @@ -674,49 +735,284 @@ "hide-output" ] }, + "outputs": [], + "source": [ + "from pydvl.influence.torch.pre_conditioner import NystroemPreConditioner\n", + "\n", + "cg_influence_model = CgInfluence(\n", + " nn_model,\n", + " F.cross_entropy,\n", + " hessian_regularization=0.1,\n", + " progress=True,\n", + " use_block_cg=True,\n", + " pre_conditioner=NystroemPreConditioner(rank=5),\n", + ")\n", + "cg_influence_model = cg_influence_model.fit(training_data_loader)\n", + "cg_train_influences = cg_influence_model.influences(\n", + " *test_data, *training_data, mode=\"up\"\n", + ")\n", + "mean_cg_train_influences = np.mean(cg_train_influences.numpy(), axis=0)" + ] + }, + { + "cell_type": "markdown", + "id": "28f46c8c", + "metadata": { + "editable": true, + "slideshow": { + "slide_type": "" + }, + "tags": [] + }, + "source": [ + "Let's compare the results obtained through conjugate gradient with those from the direct method" + ] + }, + { + "cell_type": "code", + "execution_count": 21, + "id": "599bab0a", + "metadata": { + "editable": true, + "slideshow": { + "slide_type": "" + }, + "tags": [ + "hide-input" + ] + }, "outputs": [ { - "name": "stderr", + "name": "stdout", "output_type": "stream", "text": [ - "Conjugate gradient: 0%| | 0/54 [00:00" + ] + }, + "metadata": { + 
"needs_background": "light" + }, + "output_type": "display_data" + } + ], + "source": [ + "plt.scatter(mean_train_influences, mean_cg_train_influences)\n", + "plt.scatter(\n", + " mean_train_influences[:num_corrupted_idxs],\n", + " mean_cg_train_influences[:num_corrupted_idxs],\n", + " facecolors=\"none\",\n", + " edgecolors=\"r\",\n", + " s=60,\n", + ")\n", + "plt.xlabel(\"Direct Influence Score\")\n", + "plt.ylabel(\"Cg Influence Score\")\n", + "plt.title(\"Influence of training points - Cg vs direct method\")\n", + "plt.show()" + ] + }, + { + "cell_type": "code", + "execution_count": 23, + "id": "43675da3-4370-40c1-b991-10b349d67518", + "metadata": { + "editable": true, + "slideshow": { + "slide_type": "" + }, + "tags": [ + "hide-input" + ] + }, + "outputs": [ + { + "name": "stdout", "output_type": "stream", "text": [ - "Conjugate gradient: 100%|██████████| 54/54 [00:04<00:00, 12.30it/s]\n" + "Pearson Correlation Cg vs direct 0.9977863783358302\n", + "Spearman Correlation Cg vs direct 0.9956671788800161\n" ] } ], "source": [ - "cg_influence_model = CgInfluence(\n", + "print(\n", + " f\"Pearson Correlation Cg vs direct\",\n", + " pearsonr(mean_cg_train_influences, mean_train_influences).statistic,\n", + ")\n", + "print(\n", + " f\"Spearman Correlation Cg vs direct\",\n", + " spearmanr(mean_cg_train_influences, mean_train_influences).statistic,\n", + ")" + ] + }, + { + "cell_type": "markdown", + "id": "c273f980987b1057", + "metadata": {}, + "source": [ + "### Lissa" + ] + }, + { + "cell_type": "markdown", + "id": "fcb66102c654f5aa", + "metadata": {}, + "source": [ + "The LiSSA method is a stochastic approximation of the inverse Hessian vector product. 
Compared to conjugate gradient it is faster but less accurate and typically suffers from instability.\n", + "\n", + "In order to find the solution of the HVP, LiSSA iteratively approximates the inverse of the Hessian matrix with the following update:\n", + "\n", + "$$H^{-1}_{j+1} b = b + (I - d) \\ H - \\frac{H^{-1}_j b}{s},$$\n", + "\n", + "where $d$ and $s$ are a dampening and a scaling factor." + ] + }, + { + "cell_type": "code", + "execution_count": 24, + "id": "ddf5d245e72bd4c2", + "metadata": { + "editable": true, + "slideshow": { + "slide_type": "" + }, + "tags": [ + "hide-output" + ] + }, + "outputs": [ + { + "data": { + "application/vnd.jupyter.widget-view+json": { + "model_id": "6190a894ffb0420fad95b73afd230a2a", + "version_major": 2, + "version_minor": 0 + }, + "text/plain": [ + "Lissa: 0%| | 0/1000 [00:00" + ] + }, + "metadata": { + "needs_background": "light" + }, + "output_type": "display_data" + } + ], + "source": [ + "plt.scatter(mean_train_influences, mean_lissa_train_influences)\n", + "plt.scatter(\n", + " mean_train_influences[:num_corrupted_idxs],\n", + " mean_lissa_train_influences[:num_corrupted_idxs],\n", + " facecolors=\"none\",\n", + " edgecolors=\"r\",\n", + " s=60,\n", + ")\n", + "plt.xlabel(\"Direct Influence Score\")\n", + "plt.ylabel(\"Lissa Influence Score\")\n", + "plt.title(\"Influence of training points - Lissa vs direct method\")\n", + "plt.show()" + ] + }, + { + "cell_type": "code", + "execution_count": 27, + "id": "4cd9e82d2c024051", "metadata": { + "editable": true, "slideshow": { "slide_type": "" }, @@ -729,20 +1025,32 @@ "name": "stdout", "output_type": "stream", "text": [ - "Percentage error of cg over direct method:0.00014557354006683454 %\n" + "Pearson Correlation Lissa vs direct 0.9996494901218611\n", + "Spearman Correlation Lissa vs direct 0.99770259717359\n" ] } ], "source": [ "print(\n", - " f\"Percentage error of cg over direct method:{np.mean(np.abs(mean_cg_train_influences - 
mean_train_influences)/np.abs(mean_train_influences))*100} %\"\n", + " f\"Pearson Correlation Lissa vs direct\",\n", + " pearsonr(mean_lissa_train_influences, mean_train_influences).statistic,\n", + ")\n", + "print(\n", + " f\"Spearman Correlation Lissa vs direct\",\n", + " spearmanr(mean_lissa_train_influences, mean_train_influences).statistic,\n", ")" ] }, { "cell_type": "markdown", "id": "09a7eb2e", - "metadata": {}, + "metadata": { + "editable": true, + "slideshow": { + "slide_type": "" + }, + "tags": [] + }, "source": [ "### Arnoldi" ] @@ -757,9 +1065,15 @@ }, { "cell_type": "code", - "execution_count": 22, + "execution_count": 28, "id": "ca91c121", - "metadata": {}, + "metadata": { + "editable": true, + "slideshow": { + "slide_type": "" + }, + "tags": [] + }, "outputs": [], "source": [ "arnoldi_influence_model = ArnoldiInfluence(\n", @@ -777,9 +1091,13 @@ }, { "cell_type": "code", - "execution_count": 23, + "execution_count": 29, "id": "e0a6763d", "metadata": { + "editable": true, + "slideshow": { + "slide_type": "" + }, "tags": [ "hide-input" ] @@ -789,7 +1107,7 @@ "name": "stdout", "output_type": "stream", "text": [ - "Percentage error of Arnoldi over direct method:95.7504391670227 %\n" + "Percentage error of Arnoldi over direct method:33.655646443367004 %\n" ] } ], @@ -799,12 +1117,257 @@ ")" ] }, + { + "cell_type": "code", + "execution_count": 30, + "id": "6b5b115c03594119", + "metadata": { + "editable": true, + "slideshow": { + "slide_type": "" + }, + "tags": [ + "hide-input" + ] + }, + "outputs": [ + { + "data": { + "image/png": 
"iVBORw0KGgoAAAANSUhEUgAABTsAAALGCAYAAACK4UUfAAAAOXRFWHRTb2Z0d2FyZQBNYXRwbG90bGliIHZlcnNpb24zLjcuNCwgaHR0cHM6Ly9tYXRwbG90bGliLm9yZy8WgzjOAAAACXBIWXMAAA9hAAAPYQGoP6dpAAB1KklEQVR4nOzdd5xlZ10/8M93d1NI2U2DZEmADUQpKr2JBBJALJFqQA0oICIgKiA/IdgookQQDaIoKBBAeogUl6JgAqH33kM2ZJMQCEl2A+m7z++PcyYMk6l37sydufN+v173deee85xzv3PL3JnPPKVaawEAAAAAWO3WjboAAAAAAIBhEHYCAAAAAGNB2AkAAAAAjAVhJwAAAAAwFoSdAAAAAMBYEHYCAAAAAGNB2AkAAAAAjAVhJwAAAAAwFoSdAAAAAMBYEHYCsGZV1caq+qeq2lZV11ZVq6rbV9Ux/dfPHnWN46aq7l9VH6mqS/vH+G2jrmmqqjqjqtoQzvPs/ns8ZvFVrR5VtaX/vk8ZdS1cX//cnLGA9tP+PBzW+2RUpnsc1up7dhSq6pT+sd4yovt/dH//jx7F/QOwtISdAKwq/R8nw/oD+wVJ/ijJF5M8P8lzknx3SOdmiv6P2rcnOTLJK9M93m+c4xh/kK5h/T8ito26jtlU1T6TwvvXj7oexstqDZUFxwCM0oZRFwAAI/RrSb7RWnvA5I1VdasR1TPu7pdk7yRPa62t5FDod5LsM4Tz/HO6MPc7QzjXanJeklsn2THqQpbJbyTZlKQleWhVHdxa+8GIa1oOw3qfrCRr9T0LAGNFz04A1rIbJ7lg1EWsITfur88faRVzaK19p7X2tSGc56LW2tdaa5cPo67VorV2Tf99r5X31u8n2Z3khUn2SvKo0ZazPIb1PllJ1up7FgDGjbATgFVv8hyB/ddvrKqLqurKqvpUVf3alPYTwwIryb0nhsbPNY/dbENqZxuyV1W36ms7t6qurqoLq+r1VXXLadpeN49ZVT2+qr7Yfx8XVtXLq2rTDPd/RD//6Der6oqquriqPlFVfzlD23+uqm9X1VVV9YOqekdV3WW273+G+314VX2wqnb09/vFqnpmVe01qc0x/eP9nH7T6ZMe8+s9XpOOOyPJq/qbr5p0zHXzvE1+3KvqhKr6eFX9cPLz1A+Ff2v//V5RVTur6sNV9ciZ7nfqsNHJ8xZWN6/r1n7o8uVV9YGqusc055n2NTHxWquqQ/rn9IL+efhyVT1mhpr26s838ZydXVXP67fPew7GKe+VW1XV2/rXyo+q6kNVdf9Z7v/E/vm9vH8Mz6yqh892H1O2z/u1Pek1c7MkN5vy3J8yqd3RVfXOqtrePy7fraqPVdWz5vN4LFZV/WySuyd5f5K/S3J1kt+bpf3Ec39YVf1HVZ1XVbuqn6ZhwNfGuqp6QlV9sn/t/6j/+olVNe/f9avq0Kp6Rf98XFFVn6uqGYPb6d4nM7Tbu3+vfK+qph1VVlX/2n/vvzZp26Kf26ras6r+sqrOmvq+maH9XO/ZaZ+3vs3dqurUvs6rq/t5/7KqunGmUVUHVdXfVNWX+vfUjqr6fFWdVFX7TryPktx7Ug3z+qya+r1U1W9V1af7+zm/qv5h4jGoqvv039vOqrqkql5bVQfPcM55fXZU9/N34nma/PN+2tdLLeyz7k7V/Tz/Xl/DOVX10qraPEP7o6rqLf339qPq5ow+bq7HD4DVzTB2AMbJzZJ8Ism3k7w2yUHphpi+varu11o7vW93SpIz0v0xdk5/O0m2DbugqvrlJKcl2SPJO5N8K8kRSR6a5LiqOra19plpDn1Bkl/qj/mfJMcmeVySo5LcZ8p93DnJe9N9vx/s72+fJLdJ8uwkfz2p7R378x3UH3NakkOSPDjJh6rqIa21d83ze/vbJM9MclGS1yf5YZJfSfK3SX6pqu7fWrs63eP6nCTHpPvD/dX58WO9LTM7JcmlSR6Ubq7Pz03ad+mUtk9L8ov
pHq/T0w0rnvCvSb6c7rG5IMnBSX41yWur6pattesFwrO4c5KnJ/lokv9IctMkv57k/VV1+9ba1+d5ngOSfDhdOHZquh6BD0vyyqra3Vp79UTDqqokb01yXJJvphtqu0eSRyf5mQXUPtmR/ffwxSQvS7I53Xvl3VV1QmvtTZPuf890r5V7J/lakn9J9/o6Psmb+u/7zxZw3/N5bW9L95p5Sn/75EnHf66v65eTbE2yM8k70g2fPyjdEPo/yI/D9aX0+/31Ka21i6vqnUl+vaqObq2dOcMxByX5WLr3y2npeoVeOGn/AZnna6P32iQnJDk33WuyJXlIkpcmuWeSR8z1TVTVIUk+kuTmST7UXzYn+bd0z9HAWmtXVtWb0j1Wv5LueZ9833ule+1dmOQ9/bZFP7f9++bN6X5+nJXufbNnkt9N8nMDfCszPm9V9btJXp7kqr7ec5P8VLrg+wFVdffW2nVD46vqyHQ/p26W5NPpfkatS/LTSZ6a7nG/tP8+H923m/w9b1tA3X+U7nF/W7rPvfv393FQVb093bD9rX3990jyyHSfCb8y+SQL/Ow4ud8+9ef9dBbyWfdr6X4WVrr3xjlJ7pTkiUkeVFX3bK2dPan9T6X7OXdwknen+9lxVP9YvHuWmgBY7VprLi4uLi4uq+aS7g/5NmXblontSZ41Zd8v9dvfNcO5zphm+zH9vmdP2b4tybYZ6np2f8wxk7YdmOSSdGHgbaa0/9l0fzR/Zsr2U/rzfCfJTSdt35AurGtJ7jpp+55Jzu63nzBNXUdMOce3klyZ5N5T2t04XaBwQZK95vE8/PykOg+bch/v7Pf92VyP0Tzu59H9MY+e43H/UZI7zNDmFtNs2zNdb7xrkhw+Zd8Z07zGJl4T16slyeP77S+dz/c76Tz/kWT9pO23SXJtkq9Maf/bffsPJtlz0vYD0oWP076OZ3gstky6/xdO2Xfn/vG4JMnGSduf2bd/V5INk7bfKN17oiW5xzT3ccpiXtvzeM+9tT/mdtPsO2S+r7FBL+nmn704XSh1g37br/U1vXaGYyYe+9dMfiwX8dr4rb79Z5LsN2n7vkk+lWl+Lkz3ekkXdLUk/zjDa2K6n4dnZMr7ZJbHauLnxanT7HtYv+9Fw3xu0wXALV3Ytfek7QelCz+nexyendnfs9d73tIFlFen+9k69WfJfZPsSvJfU7Z/pD/fM6f7/qbUO+/HeYbvZUeSW0/avle6f/7sSvKDTPosSBe4/m9/3O0nbV/wZ8dMj+Wk/adkYZ91+/X17kpy9JRzPaNv/z9Ttv9Pv/3JU7Y/aNJz+uiFPrYuLi4uLiv/Yhg7AOPknCTPm7yhtfbedH9M3XUE9fxOukDqWa21r0yp60tJ/j3JHarqNtMc+9w2qSdQa+3a/HhI9+Tv5QHpwqV3tGkW/WmtbZ9087gkt0jyktbaB6a0Oz9dD5vD0v2BPpff7a+f11q7bgX7vs6npev1NONw3iXw8tbaZ6fb0Vo7a5ptV6frobgh8/t+J3y4tXbKlG2vTBdELeQ1dnmSP2mt7ZpU01fS9ei7dVXtN6nto/rrv+jrnmh/aSb12l2gHUmeO3lDa+1TSV6X7jX7kEm7fjddKPAn/fM70f57k+5/Ic/1fF/b83XF1A2ttYsGOM9CPSzdPzTe1FqbqOE9Sb6b5PiqOnCG465O8v8mP5ZTLOS1MfE+PLG19sNJ7X+ULgBK5nhuqmqPdL0/L0sXUF1n0mtiUVprH03yjXS9HA+asnvi9T21x2qyuOd2Ytj/n7XWrpx0/MUZ7H0z0/P2xHQ9rZ/cWjtvSq3vT9fT8wFVtX/SDcNOF/5+Lt3UB5lyzEWT6x2Cf2qtfXXS+a9K8qZ0webWyZ8FrbXdSf6zv3m7SecY5mfHVPP9efCgdEH1m9r1e02/KN0/Rn6xqm6adEPu0/X2Pztdr97JNb89yQcCwNgyjB2AcfK5yQHBJOem++NyuU3c5+2q6tnT7P/p/vrWSb4yZd+
npml/bn89OUS5e389nyF5E/XcbIZ6fmpSPXMNZb9jf/1/U3e01r5RVduTHFlVm1pry7Eq9ydm2tH/8fuMdH+I3zTJDaY0OXwB93O956W1dk1VXZiffF7m8s3W2s5ptk9+jifCqzukC48/Mk37Dy3gPif7TGvtsmm2n5EufLpDklf3Ac1RSc5r0y9GM/H832EB9z3f1/ZcXpduOoiP98OkT08XRm+f/bBOVR2QHw+Tn+zkPkiey8QQ9olgJq21a6vqdekC/99O8k/THLetD4pnspDXxh3TvTbOmKb9B9L1gpvrublVumkJzpzhvXpGhrPo0quT/E2S30w3xD5VdWi63vefba19YVLbRT23vYnHZrr3yBkLrn7m523i5+q9p85d2btRkvXpft5/Oj/+mf3ePlxcatO93yYWifv0NPsmAtsjJm0b5mfHfOqb7ufBbJ8511bVB9P94+8O6f7BOfG6/9AMvxeckX4+VADGj7ATgHFy6Qzbr81oFuWbWOThcXO022+abZdOs22iR9H6SdsO6K/Py9wm6nnYAPVMtam/nmnF7QvSBYsHpOtFuNS+O93Gqrp5uiD0wCRnphvWuCNdCLQlXYgz7WIlM7h0hu3X5iefl8WcJ1POtSnJxTP0BLxwmm3zMdNxE4/jpinXsz3PyY9fh/Nx6TTbpvu+Z9VaO62fw+9p6Xo4Pj5JqurT6YYH/+8cpzggP15EZbJTZqjxOlV163TzYX6ttfaxaY5/Wrr3/XRh57Sv1Ulmuu/ZXhtXT23cB0AXpQvbZjPxHM/1mlis16TrUfmo9GFnuh6lGzKlV+cQntvkx4/NNdPsG+R7mumYiZ+rfzrH8RM/Vw/or+fzM3sYpvv5e+089u0xadswPzumunSWGqa+1pP5/yxartc1ACuQsBMA5m93urkep3PANNsm/pC83ZReS8N0aX89n96JE/U8qLX2jkXe78S5Dks3/91Um6e0W2pthu1/ku4P9cdMHX5eVb+V4fRYW2o70y0msmGawPPQAc8503GH9dc7plwfNk3bZPmf55/QWtuaZGtV7ZvkbunmzHxikv+uqjtMnT5iyrHb0i10MoiJXp23mmmF6SQ/W1X3aK1N7ZE7U/tB7Ej32thjaqhX3crnh6R7/cx1jmTu18SitNa2V9X/JblfVd2q7yn8qHRzgk43BcfAz21vxscmg31PMz1vE4/fphl65E51aX+9kB7lozbMz47F1jDfn0XL8roGYGUyZycAzN8lSQ7t57ib6s7TbJvo8XX00pV03X38yqytfrLtMOqZmB/zmKk7quqodEMgz57ncODZTAw/XEivycmO6q/fOs2+1TKE8bPpfme7xzT77jngOe84MYfgFMdMus/0Q93PSnJ4v7LxVMf2158ZsI657Mo8nvvW2o9aa//XWvuTJH+b7p8S83lPLFi/evhvp/vnxyuTvGKay3v75nP16l6sidfGvabZd690j91cz83X0s0Tevuq2jTN/mMWU+AUp/TXj6qq2ye5bZJ3t9a+P9MBi3huP5PusZnuPXLMAmqey0J/rk60/6Wqms/fYruSpKoG/Rk4DIN8diz2Z/dUs33mbMiPa/vMlPb3nOGxu955ABgfwk4AmL9PpBsV8ZjJG6vq0Ul+YZr2r0rXi+dZVXW9hVeqal1VHbPImt6ZbmGGB/Y9Fafex+R5196eLrh6UlX96nQnq6qfr6p95nG/r+yv/6Kqbjjp+PVJ/j7d7xivmNd3MLsf9Nc3HfD4bf31MZM3VtUvZXkXUFqM1/TXz6uq63oW98HUXw54zk1J/mryhqq6c7phxTuS/NekXa9M1wPyhZNDg6o6ZNL9vzJL4wdJblhVU+dZTVXdqw85pproyXX5EtX06+l6C7+3tfbY1trvTb0keXiSHyV5+AwB4rBMPO7Pn/y+7b8+qb856/uw7/X4uiT7Z8oCRZNeE8NyWrqepo9M8uh+2ylTGw3puZ2YS/VvqmrvSec+KMlfzLPe+fjndL1T/7G
qfnrqzqras6quCwlba59ON//u7fPjRaQmtz94cr1Z/M/AYRjks2PYdb8tycVJfquq7j5l31OSHJnkfROLHfXzu/5vv/0Pp9T6oKyef3YBMADD2AFg/l6SLuj816q6b7pFFG6fbvGG/043zPI6rbUfVNXx6YKjj1XV+5N8Od1wyJv0xx2cZPIftgvSWru6qh6Wbi7K11fV49P1wtk73WIR903/ed8vpPPQdL3OtlbVR9KtCHx5X89dktw83XDAWcOE1tpHquoFSZ6e5EtVdWq6cOdXkvxsukVBXjjo9zXJR/tanlJVB+fH86y9ZJ4LH7003XP2lr7G8/v6fjnJm5P8xhBqXGqvSbeoyy+ne6zfkW4+vV9P8skkt0zXy3AhPpjk96rqbulW+d6c7rFYl+TxU4bj/n265/VBST5fVe9Kt6DNw9LNB/mC1tqgCyXN5f3pXpfv6RcguSrJ51tr70w3H+bhVfXhdKH21UnulOQ+Sc5J8sYlqmliCPt/zNSgtbazqt6SLtB7ZJJ/WYpCWmuv74Obhyf5clW9Ld3PlwenC3ne1Fqbz2rqf5buZ8VT+oDzQ/nxa+JdSR44pHqv6B+Xxyb5g3SB2NZpmg7juX1DuvofmO598/Z075vj071vbrGob6bXWvtaVf1uuuD5y1X1nnQrz++RLug7Osn30y0ENeGR6RbI+duq+vX+60q30M/9+7bb+rbvT/deO61/712R5JzW2muHUf98DPjZcXq6n0vPr6qfTTcyIq215w1Yww/7x/ktST7Qv46+k+51cf90nw2Pn3LYk9J9hpxcVfdP8vl0vf0fku4fhQ8YpBYAVj5hJwDMU2vtK1V1v3RDKR+QbhGFM9OFlg/NlLCzP+b9VXXbJP8v3arDR6f7w/38dKvKTje8eqF1faofEnpiulDqHkkuS/KtTOm911r7QlXdLt1clr+WLgjcnW5xh8+mW7Dlonne7zOq6rPpes38Tro/7s9K12vqRdMtmjLA93ZJHwY8K11wtG+/6z8zj3ki++/32CTPS3Jcut99Pp/u+bo0qyDsbK21qnpIukDqt5P8Ubrn69XpwtwHZ+55Gac6O8kT0vX+e0K6RZo+k+S5rbX3Tm7YB+q/mO41c0J//9emexyf0lp7w2Df2bw8L918uA9I13t6fbrv+53p3ocPSTeFxP3SvY6/028/ubV2ybCL6Xvu3TvdoifvnKP5v6d7zT4uSxR29n4r3crr1y3kk+SrSV6U5F/nc4LW2kVV9Qv58c+2Oyf5ero5MrdlSGFn75R0YeceSd4ww8+JRT+3/fvmYel+Lj463c+pC9L1+HxukisX+41Muq//rKrPp1tQ6dh04duP0v2cPzXJm6a0P7uq7pjun0UP7mu7Mt1j/aIkk1d9/48kN0v3D4+np/sZ9oEkyxZ2Jgv/7GitfbWqHpXus+8P8uN/6g0UdvbnfHv/Ov2zdJ+nm9KFnP+W5K9ba+dPaf/NvhfoSeleR8ck+UK6x/yGEXYCjK1qbZhzpAMAsFz6EPJ/kpzUWnvmPNpvSRd0vrq19uilrQ4AAJafOTsBAFa4qrrxNNsOzo/nZfyvqfsBAGAtMowdAGDl+4d+COlH0s3/d0S6KQsOSvKy1tonRlkcAACsFMJOAICV77R0K1E/IN0cllemW+zqFRnOqvcAADAWzNkJAAAAAIwFc3YCAAAAAGNB2AkAAAAAjAVzdi5QVVWSGye5bNS1AAAAAMAqtX+S89uQ59gUdi7cjZNsH3URAAAAALDKHZHkvGGeUNi5cBM9Oo+I3p0AAAAAsFD7p+tMOPRsTdg5uMtaaztHXQQAAAAArCbdLJFLwwJFAAAAAMBYWNVhZ1Xdq6reWVXnV1WrqgfP45hjquozVXVVVX2rqh699JUCAAAAAEttVYedSfZN8vkkT5pP46o6MsnWJKcnuX2Sk5P8R1X90hLVBwAAAAAsk1U9Z2dr7d1J3p3Me6z/E5Kc3Vp7Wn/7q1V1zyRPTfLeJSkSAAA
AAFgWq71n50L9fJL3Tdn23n77tKpqr6raOHFJt1oUAAAAALDCrLWw87AkF07ZdmGSjVV1gxmOeWaSHZMu25euPAAAAABgUGst7BzE85NsmnQ5YrTlAAAAAADTWdVzdg7gu0kOnbLt0CQ7W2tXTHdAa+2qJFdN3J7n3KAAAAAAwDJbaz07P5rkvlO2/WK/HQAAAABYxVZ12FlV+1XV7avq9v2mI/vbN+33P7+qXjPpkH9LcvOqekFV3aqq/iDJw5P84/JWDgAAAAAM26oOO5PcOcln+0uS/EP/9XP725uT3HSicWvt7CTHpevN+fkkT0vye6219y5XwQAAAADA0qjW2qhrWFWqamO6Vdk3tdZ2jroeAAAAAFhNljJfW+09OwEAAAAAkgg7AQAAAIAxIewEAAAAAMaCsBMAAAAAGAvCTgAAAABgLAg7AQAAAICxIOwEAAAAAMaCsBMAAAAAGAsbRl0AAAAAANDZcuLW9UmOTrI5yQVJztx20nG7RlvV6qFnJwAAAACsAFtO3PrQJNuSnJ7k9f31tn478yDsBAAAAIAR6wPNU5McPmXX4UlOFXjOj7ATAAAAAEaoH7r+4v5mTdk9cfvkvh2zEHYCAAAAwGgdneSIXD/onFBJbtK3YxbCTgAAAAAYrc1DbrdmCTsBAAAAYLQuGHK7NUvYCQAAAACjdWaS7UnaDPtbknP7dsxC2AkAAAAAI7TtpON2JXlyf3Nq4Dlx+yl9O2Yh7AQAAACAEdt20nGnJTk+yXlTdm1Pcny/nzlUazP1jmU6VbUxyY4km1prO0ddDwAAAADjY8uJW9enW3V9c7o5Os8ctx6dS5mvCTsXSNgJAAAAAINbynzNMHYAAAAAYCwIOwEAAACAsSDsBAAAAADGgrATAAAAABgLwk4AAAAAYCwIOwEAAACAsSDsBAAAAADGgrATAAAAABgLwk4AAAAAYCwIOwEAAACAsSDsBAAAAADGgrATAAAAABgLwk4AAAAAYCwIOwEAAACAsSDsBAAAAADGgrATAAAAABgLwk4AAAAAYCwIOwEAAACAsSDsBAAAAADGgrATAAAAABgLwk4AAAAAYCwIOwEAAACAsSDsBAAAAADGgrATAAAAABgLwk4AAAAAYCwIOwEAAACAsSDsBAAAAADGgrATAAAAABgLwk4AAAAAYCwIOwEAAACAsSDsBAAAAADGgrATAAAAABgLwk4AAAAAYCwIOwEAAACAsSDsBAAAAADGgrATAAAAABgLwk4AAAAAYCwIOwEAAACAsSDsBAAAAADGgrATAAAAABgLwk4AAAAAYCwIOwEAAACAsSDsBAAAAADGgrATAAAAABgLwk4AAAAAYCxsGHUBAAAAACyfLSduXZ/k6CSbk1yQ5MxtJx23a7RVrX4e15VBz04AAACANWLLiVsfmmRbktOTvL6/3tZvZ0Ae15VD2AkAAACwBvTB26lJDp+y6/AkpwrmBuNxXVmqtTbqGlaVqtqYZEeSTa21naOuBwAAAGAu/RDrbekCuJqmSUuyPcmRhl7Pn8d1MEuZr+nZCQAAADD+jk5yRKYP5NJvv0nfjvnzuK4wwk4AAACA8bd5yO3oeFxXGGEnAAAAwPi7YMjt6HhcVxhhJwAAAMD4OzPd3JEzLd7Skpzbt2P+PK4rjLATAAAAYMz1i+M8ub85NZibuP0Ui+gsjMd15RF2AgAAAKwB20467rQkxyc5b8qu7UmO7/ezQB7XlaVam6mXLdOpqo1JdiTZ1FrbOep6AAAAABZiy4lb16dbHXxzurkkz9TzcPE8rvO3lPmasHOBhJ0AAAAAMLilzNcMYwcAAAAAxoKwEwAAAAAYC8JOAAAAAGAsCDsBAAAAgLEg7AQAAAAAxoKwEwAAAAAYC8JOAAAAAGAsCDsBAAAAgLEg7AQAAAAAxoKwEwAAAAAYC8JOAAAAAGAsCDsBAAAAgLEg7AQAAAAAxoKwEwAAAAAYC8JOAAAAAGAsCDsBAAAAgLEg7AQAAAA
AxoKwEwAAAAAYC8JOAAAAAGAsrPqws6qeVFXbqurKqvp4Vd11lraPrqo25XLlctYLAAAAACyNVR12VtVvJPmHJM9Jcsckn0/y3qq60SyH7UyyedLlZktdJwAAAACw9FZ12JnkT5L8e2vtVa21ryR5QpLLk/zuLMe01tp3J10uXJZKAQAAAIAltWrDzqraM8mdkrxvYltrbXd/++dnOXS/qjqnqs6tqrdX1c/McT97VdXGiUuS/YdRPwAAAAAwXKs27ExySJL1Sab2zLwwyWEzHPP1dL0+H5Tkkem+/49U1RGz3M8zk+yYdNm+iJoBAAAAgCWymsPOBWutfbS19prW2udaax9I8tAk30/y+FkOe36STZMuswWjAAAAAMCIbBh1AYtwUZJdSQ6dsv3QJN+dzwlaa9dU1WeTHDVLm6uSXDVxu6oWXikAAAAAsORWbc/O1trVST6d5L4T26pqXX/7o/M5R1WtT/JzSS5YihoBAAAAgOWzmnt2Jsk/JHl1VX0qySeSPCXJvklelSRV9Zok57XWntnf/qskH0vyrSQHJPnTJDdL8h/LXTgAAAAAMFyrOuxsrb2pqm6Y5LnpFiX6XJJfbq1NLFp00yS7Jx1yYJJ/79tekq5n6D1aa19ZtqIBAAAAgCVRrbVR17CqVNXGdKuyb2qt7Rx1PQAAAACwmixlvrZq5+wEAAAAAJhM2AkAAAAAjAVhJwAAAAAwFoSdAAAAAMBYEHYCAAAAAGNB2AkAAAAAjIUNoy4AAAAAWJ22nLh1fZKjk2xOckGSM7eddNyu0VYFrGV6dgIAAAALtuXErQ9Nsi3J6Ule319v67cDjISwEwAAAFiQPtA8NcnhU3YdnuRUgScwKsJOAAAAYN76oesv7m/WlN0Tt0/u2wEsK2EnAAAAsBBHJzki1w86J1SSm/TtAJaVsBMAAABYiM1DbgcwNMJOAAAAYCEuGHI7gKERdgIAAAALcWaS7UnaDPtbknP7dgDLStgJAAAAzNu2k47bleTJ/c2pgefE7af07QCWlbATAAAAWJBtJx13WpLjk5w3Zdf2JMf3+wGWXbU2U69zplNVG5PsSLKptbZz1PUAAADAqGw5cev6dKuub043R+eZenQCc1nKfE3YuUDCTgAAAAAY3FLma4axAwAAAABjYcOoCwAAAIDVYjUO216NNQMMSs9OAAAAmIctJ259aJJtSU5P8vr+elu/fUVajTUDLIawEwAAAObQh4OnJjl8yq7Dk5y6EsPD1VgzwGJZoGiBLFAEAACwtvTDwLelCwlrmiYtyfYkR66U4eGrsWZg7bBAEQAAAIzO0UmOyPShYfrtN+nbrRSrsWaARbNAEQAAAMxu85DbLYeh1GxxI2C10bMTAAAAZnfBkNsth0XXbHEjYDUSdgIAAMDszkw3v+VMi160JOf27VaKRdVscSNgtRJ2AgAAwCz6YdtP7m9ODQ8nbj9lJQ3vXkzN/dD1F/c3p875OXH75L4dwIoi7AQAAIA5bDvpuNOSHJ/kvCm7tic5vt+/oiyiZosbAauWsBMAAADmoQ8HtyQ5NskJ/fWRKzHonDBgzatxQSaAJFZjBwAAgHnrh32fMeo6FmKAmlfjgkwASfTsBAAAAH7SalyQCSCJsBMAAACYZDUuyAQwoVqb6R81TKeqNibZkWRTa23nqOsBAABgcP2K4kenm3/ygiRnCvE6W07c+tB0q7IfMWnzuemCzhU7Tymw8i1lvibsXCBhJwAAwHiYIczbnuTJwryOMBhYCsLOFUTYCQAAsPr1Qeep/c2atGvij+TjBZ4AS2Mp8zVzdgIAALCm9L0VX9zfrCm7J26f3LcDYBURdgIAALDWHJ1u6PrUoHNCJblJ3w6AVWTDqAsAAACAZbZ5yO2GzlyZAIPRsxMAAIC15oIhtxuqfj7RbUlOT/L6/npbvx2AWQg7AQAAWGvOTLfq+kwr9rYk5/btltWkhZMOn7Lr8CSnCjwBZifsBAAAYE3ph4M/ub85NfC
cuP2U5R42Ps+Fk/51y4lb91i+qgBWF2EnAAAAa862k447LcnxSc6bsmt7kuP7/cttPgsn3SjJeXp4AkxP2AkAAMCa1AeaW5Icm+SE/vrIEQWdyfwXRDokhrQDTKtam2mKEqZTVRuT7EiyqbW2c9T1AAAALJaVv1eGLSduPSbdYkTz0dL1Qj3ScwWsNkuZr+nZCQAAsIZZ+XtFmWvhpMkqyU3ShdQA9PTsXCA9OwEAgHExaeXv5CfniZz4Q3FUc1cO3WrpvTrLczKTE7addNwblrAkgKHTsxMAAIChmufK3yf37Va11dR7ddLCSRfN85ALlrAcgFVH2AkAALA2zWfl71U/THrLiVuPT9dT8vApuw5Pt8jPX660QLcPPA9P8v3MPKS9JTk33dB3AHobRl0AAAAAIzHflb/n227F6YPON2b6QHdi23OT/P6WE7c+eSUN2d920nHXbDlx6xPSBbUt008z8JRhDMVfLUP8AeZDz04AAIC1ab7Dn1flMOl+iPpbksyn1+ZEL88VNax90pD286bs2p4hzae6mob4A8yHsBMAAGBtmmvl71U7THrKfKTzMdFr8l+3nLh1jyUoaWB9oLklybFJTuivjxxi0DnbEH+BJ7DqWI19gazGDgAAjIuVvBr7YoZWbzlx6zHpeigO4vtJnrAU3/dKGi7e17ItXbA53TD/li4MP9KQdmDYrMYOAADA0C3HMOlBDGFo9WLmGT0kS9CrcQUOF18TC1QBa4+wEwAAYA1bymHSgxjS0OrFzDM6Ef6dPKxV2lfocPGxX6AKWJusxg4AALDG9cOUzxh1HVPm2pza47DSDa0+ecuJW98+x9DqiflIZxqiPZfJvRrPGOD46wzxexq2sV6gCli79OwEAABgpRjK0Oo+NHxyf3MxC1UMo1fjSh0uPrYLVAFrm56dAAAADN2Ai/EMe2j1xUkOnmfb6QyjV+OKHC6+7aTjdm05ceuT0w2vb5l+gaqnWJwIWG307AQAAGCoFrEYz1CGVk+aI/OgeZ5vqmH2alyxw8VX6gJVAIsh7AQAAGBoFrkYz6KHVs8xR+Z8DLtX44oeLr7SFqgCWKxqbTHTl6w9VbUxyY4km1prO0ddDwAAwErRB43bMvPCQC1d8HfkTEHipLA0mX5o9aw9DrecuPWYdD1JB3VuuqDzuvsYcEj+5JoW9T0BjJulzNf07AQAAGBY7p35LcbzR32AeD1zDa1O8vYtJ249ZsuJW3+rv556nkHmvnxqZujVuIgh+dcxXBxg+ejZuUB6dgIAAFxfH/79e+Y/T+b2JE+eKeibrjdlkgelG6J+xEznWWDPzll7mg67R+Zie4gCjIulzNeEnQsk7AQAAPhJs4SCs1lQYNjfx1tnafLr20467rR5DKWf1/0PY0g+ANMzjB0AAIAVaRELAk20PXmmIe1T7uPlc5zvZVtO3Lq+Dx6f3G+brXfPXEPIj878huQfPUddACwjYScAAACLMVcoOJv5Bob3TnLwHG0O6dvNNkfm95L8Y+a34vh85/4cZI5QAJbIhlEXAAAAwKo2jLBvrnMcM8/zHJPk/5Iu8Nxy4ta3Z/A5Mi8YcjsAloGwEwAAYI2Za6Gcfv+98+OQ8YwkH5ghKBxG2LckgWFf7xkDHn5muqHuc83ZeeaA5wdgCRjGDgAAsIb0C/1sS7di+ev762399on9FyZ5f5K/7C/vT3LxlhO3Hj/NKQ9ZRDktybmZOzA8Y57nm2+7Oc0x9+fE7adYnAhgZbEa+wJZjR0AAFhNpvTiPCrJc/pdk3srTvxh+MIkT5/jlC/YdtJxz5h07gsz93ya05n3auzzvJ+Lkhw27PCxD39fnG5e0gnnpgs651xFHoDrW8p8Tdi5QMJOAABgtZghqJtJS7I7yawro/cetu2k407dcuLW+6Tr9TmI3Un+fiI4nUv/vZyamYeUzxmaDmquYf8ALIywcwURdgIAAKvBpHAwGWyl9Nl8L8mNkzwr3TD3Qcy7Z+cEvSwBxoO
wcwURdgIAACtd3xNxW2ZeXGcYjk1ynwwediY/XuTnyPn2lNTLEmD1W8p8zWrsAAAA4+fozG/o+mJsTvLBRZ6jktwkXb1nzOeARa6wDsCYsxo7AADA+Nk8wDELHfb3U0leNcD9TGeQegHgevTsBAAAWCGGOET7ggW2b+l6WU5cz8dzsvCAdCYLrRcApiXsBAAAWAFmWHxn+5YTtz55rsV3pglJP5xuLsz5ztm5M8mmebadbLb25yb5f0n+OckhM7SdmLPzzAXeLwBMyzB2AACAEZu0cvrhU3YdnuTUfv9sx25LcnqS1/fX3+6/Tq7f+7L1lzckubjftmkR5c/k0dtOOu7NSZ4wSx1Jt5K6BYYAGAqrsS+Q1dgBAIBhmsfK6detWN7fntyD85Akb+6315RjkuSFSU7IT/YW/UGS/0nym9McN0wnbDvpuDckM/ZaPTdd0Dlrr1UAxs9S5mt6dgIAAIzWxMrpM4WOEyuW/1mu34PzjZPaTD0mSX4ryc2TPCvdUPUkObjfXrPc5zBcNw9nH2huSXJsuvD12CRHCjoBGDZzdgIAAIzWfFcif84029bP0n4iJH1NftyLczlMOw9nP1T9jGWsA4A1SNgJAACwxOZYZX0hK5EP0hNzuYPOxDycAIyIYewAAABLaIYFhLZNWnTozHQ9IWdaUGFi+1IOOZ/Oa/Pjoe8z2T3l9vYkxxueDsCoWKBogSxQBAAAzNekVdaT6RcQOn7bScedNke75Q45JzwiyZXp6ppp4aSHJ7ko0/dYBYBpLWW+JuxcIGEnAAAwHwtZZX3bScftmmXF8n9P8tylrXZax2476bgzrKQOwLBZjR0AAGD1me8q60f3t7cmeUeSTyb5nyT3T3JkkpOSLGdvyZYuzDwzsZI6AKuLBYoAAACWxnxXWd+85cSt/5XkwVO23z/J25Kcl9lXXR+maRcYspI6AKuFsBMAAGBpzHeV9Scl+YUZ9j14OKXM6LIk+0+6vT2GpwOwigk7AQAAlsbEKuszzdk5Yaagc6m9MMkz0w2jt8AQAGPBAkULZIEiAABgvmZZZX2UvpfkSdtOOu7UOVsCwBKwQBEAAMAq1A8HPz7dvJsrwV8nubGgE4BxJewEAABYQn3g+ZhR19H7P8PUARhnA4edVXXTqvq3qvp6VV1cVffqtx9SVf9UVXcYXpkAAACrUz+U/b9GXEZLcm66eUQBYGwNtEBRVd0m3YfkuiQfT3LUxLlaaxdV1T2T7JvksUOqEwAAYNWZNGfnKOfrnFio4Sl6dQIw7gbt2fmCJJcm+ekkj8z1P7i3plvRDwAAYE3acuLW9UlevIx32frLD6Zs357k+H44PQCMtYF6dia5V5Lntta+X1UHT7P/O0kOH7wsAACAVe/oJEcs4/1tT/KUJG/v73tzkguSnKlHJwBrxaBh57okl8+y/4ZJrhrw3AAAACtS31tzIsS8R5J7JrlRkguTvDbJi7eddNw1ffPNS1jKxND0ZyX5Vq4fap6xhPcNACvWoGHnZ5Icl+SlU3dU1YYkv5nkY4uoa96q6klJ/jTJYUk+n+SPWmufmKX9w5L8dZItSb6Z5BmttXctQ6kAAMAq1Yecf57kaUk2TtPk0CQvTPKCLSdufeG2k457RroAcrHOTfKhJL+U5KBJ27enm4PT0HQAmGTQOTufn+SXq+pfk/xsv+3Qqrpfkv9JcuskJw2hvllV1W8k+Yckz0lyx3Rh53ur6kYztL9HkjckeUWSOyR5W5K3VdXPTtceAABgy4lbj0+yM93fHdMFnZNVkqdvOXHr36Vb1HV7ftwLc6GemuTIbScdd0K63qPHJjmhvz5S0AkA11etDfa5W1W/nW6y7U3pPtBbf70zyRNba28YVpGz1PDxJJ9srf1hf3tduv98vqS1dr2wtarelGTf1tqvTdr2sSSfa609YZ73uTHJjiSbWms7h/BtAAAAK1QfWj59gEN3JblBkgekW409WfiK7CdsO+m4Jf+7CgCW21Lma4M
OY09r7bVVdVqSX0zyU+l6iZ6V5L2ttcuGVN+MqmrPJHdK18t0oqbdVfW+JD8/w2E/n64n6GTvTfLgWe5nryR7Tdq0/yD1AgAAK9+kOTk3J7llBgs6k2R9kidtO+m4k/ueoS/OwhcrGsYweABYUxYcdlbVPul6T57UWnthuqHgo3BIul8gLpyy/cIkt5rhmMNmaH/YLPfzzHSTfgMAAGNsy4lbH5rBQsmZHJUk20467rQtJ26dvEL6hUleneTwTN/bs6Ub/n7mkOoAgDVjwWFna+3yqro2yY+WoJ6V6Pn5yd6g+6f7xQMAAFilpvTgvCBdZ4o3D/luDp/4ol8l/YxJ9//kdMPbJ6YDmzAxz9hTJq2sDgDM06ALFL01yfFVtdA5Z4bponTz4Bw6ZfuhSb47wzHfXWD7tNauaq3tnLgkWfIh+gAAwNLpe3BuS3J6ktf312/sdw/zb5zzZtrRLy50/DRttic53uJDADCYQcPON6ZbDfD0qnpEVf1CVd1x6mWIdV5Pa+3qJJ9Oct+Jbf0CRfdN8tEZDvvo5Pa9X5ylPQAAMEb6oPPUTOp12Vuf4QadSfKt2Xb2geaWWGUdAIZmoNXYq2r3pJvTnaCStNba+kELm2cdv5FurpvHJ/lEkqckeXiSW7XWLqyq1yQ5r7X2zL79PZJ8IMmJSbYm+c0kf5bkjq21L83zPq3GDgAAK8g0Q9LPnG4IeN9uW4Y3J+dsdiW5wbaTjrtmGe4LAFaVlbga+2OGWcSgWmtvqqobJnluukWGPpfkl1trE4sQ3TTJ7kntP1JVJyR5XpK/TfLNJA+eb9AJAACsLDMsKrR9y4lbnzxND8mjszxBZ5K8SNAJAMtvoJ6da5menQAAsLQW0FNzYkh6Mv0iPz8x9+WWE7eekOR1Qy536gJDu9IFnc8Y8v0AwNhYynxt0Dk7r1NV+1XVrfvLfsMoCgAAWJtmWDxoW799crv16Xp0Jtefa3Pi9sl9uwk3GmKp5yb59SR7JXlqkn/ur28g6ASA0Rl0GHuq6i5JXpDknvlxaLq7qs5M8vTW2qeGUB8AALDKzLdn5jTHTe6pOdnhSU7dcuLWyT015xqSXklu0rc7o9/2vXl9AzPbkeSVSd6Rn/yeTl7keQGAIRmoZ2dV3S3JB5PcMcl/pPsP5lP7r++Y5INVdddhFQkAAKwO8+2ZOc1xC+2puXmeJW2edP7F9OxsSXYm+dNtJx13xnzCWwBg+Q06jP1vkpyX5JattSe21v6pvzwxyS2TnN+3AQAA1ohJPTMPn7JromfmbIHnRE/NqUHnhMk9NZOux+h8HDUpgP3HeR4zn/sHAFagQcPOuyV5WWvtu1N39CuhvzzJ3RdTGAAAsHoMOIfmZAvqqZnkzCTb59H+yZk+gJ1qviu3zrdOAGAEBg07d2f2+T7X920AAIC1YaE9M6eab0/NC5KkH0b+8nm0P3jS/c/m+wu5fwBgZRp0gaKPJHlSVb2+tXbO5B1VddMkf5Dkw4stDgAAWDXm3TNzugWM8uOemodn+mCy9fvPnLTtW/O8z7mCzqcm+Zck317g/QMAK8ygPTv/LMmmJF+rqtdX1bP7yxuSfK3f98xhFQkAAKx4855DM9MsYJTkQemGnCfXH1I+cfspUxYGGlYvywu3nXTcNQPcPwCwwgwUdrbWPptu3s73JHlgkr/qLw/ot929tfb5YRUJAACseBM9M2ea+7IluSjJczLDAkb918enWwx1su1Jjt920nGnDXCf8zExNP60Bd4/ALDCVGvz/fyf4QRV65LcsL/5/dbaWM/VWVUbk+xIsqm1tnPU9QAAwEoxaTX25CeHgk/80XFxkoMy+zDxI/vbPzHMfaYelcO6z8nnn26YvR6dADA8S5mvLTrsXGuEnQAAMLM+fHxxusWKJpyb5N+TPHcepzh229/92oeS3CfJTZNcmeTMTFkrYJ73+ZT+69nCUD02AWCZLWW+NtAw9qp6XlV9bpb9n62qZw1cFQAAsCr1weG
WJMcmOaG/PjLzWEyo2u684tTn/F6Ss5O8N11A+tokZ6fqnan6mYXc57aTjjvN0HQAWFsG6tlZVV9L8l+ttWkXIaqqv0nykNbabRZZ34qjZycAACzclhO3HpNuMaJpVdudv3/XyXnol/6vVfLKH+2x97/d5Q9fu/GGP7pky3P+92W3uvfZn35gdcPKfzGtfWKA+zc0HQBWiKXM1zYMeNxNk5w1y/6zk9xswHMDAADjZ2IxocMzzfyZj/jsu9tDvnR6Xbtu/SN+6k/fflWS/0pyxDl73iCPfvhzst9VPzrvjJf//gWHXL7jbam6eVq7ciF33gebZwzh+wAAVrCBhrEn+WFmDzOPTDe3DgAAwETY+OT+5k8OL2utPebT76jvHHDYh/ug89RMWbH9h3vte+OHn/B3P52uZ+bxy1AyALAKDRp2npHk8VV1+NQdVXWTJL+fWYaoAAAAa89M82fe8fyvfe8WF5+Xwy676NnpFhpKrt/7s7598BH55OG3uWp38silrxYAWI0GDTv/MsleSb5cVS+qqt/tL/+Q5ItJ9uzbAAAAXGe6xYTe+IZnPj5JnviQP9uYbkX16w1z79VXb3TkXpfveYOjlqNWAGD1GWjOztba16vq6CQvSfLUKbs/mOSPW2tfXWxxAADA+Lne/Jl/d+19kmTTlT+8+VzHbrryh7liw16791uy6gCA1WzQnp1prX2htXbvJDdKcvf+cqPW2jGttS8Mq0AAAGDsfSLJzsd94rTbzdZo/6t+lPt96+O5YOMhH16mugCAVWbQ1div01q7KMlFQ6gFAABYi1r7YapefZvvnf2Im11ywQXnHLj5sEwzlP1JH31z9rr26mzeedFfjaBKAGAVmHfPzqo6rKruVVX7Tdm+R1U9t6rOqqrLq+ozVfXA4ZcKAACMsb+u5JJ3v+oPN9zr259Otd3Xrdh+8I8uzV+97+V5wsffmo/c7HavueGPLjl3lIUCACtXtdbmbpWkqk5O8ltJbtJau3rS9n9K8odJLk1yVpLbpFug6L6ttQ8Oud6Rq6qNSXYk2dRa2znqegAAYGxUHZHk1CR3O3fTja794qFHbdjv6ity9+98Ma2qfeRmtz3l2G9/+rGZ7x8xAMCKtJT52kLCzs8m+XRr7fcmbbthkguSfC3JPVtrl1bVzZJ8NMknW2sPGmaxK4GwEwAAllBVJbnH7tRjLt5n021/tMfe6y7c/6CPH3rZxc+62aUXmD4LAMbAUuZrC5mz8yZJXjNl26+lGwr/9621S5OktXZOVb0qyWOHUiEAALB2dL0xPrwu+fAhSQ5JcrMRlwQArB4LWY197yQ/nLLt6CQtyfunbD8ryYGLqAsAAAAAYEEWEnaeneT2U7Ydm+Sc1trUCcL3S3LxIuoCAAAAAFiQhYSdpyV5VFX9RlXdpKr+PN2IkjdP0/buSb49jAIBAAAAAOZjIQsU7ZvkzHS9O1uSSvL1JHdtrV02qd3BSc5J8sLW2nOGXfCoWaAIAAAAAAa3IhYoaq39qKrumuQhSW6eLtB8W2vtyilND0/yrCSnDq1KAAAAAIA5zLtnJx09OwEAGKUtJ25dn26h0M1JLkhy5raTjts12qoAAOZvKfO1hczZCQAAjNCWE7c+NMm2JKcneX1/va3fDgCw5gk7AQBgFegDzVPTTRs12eFJThV4AgAIOwEAYMXrh66/uL9ZU3ZP3D65bwcAsGYJOwEAYOU7OskRuX7QOaGS3KRvBwCwZgk7AQBg5ds85HYAAGNp0WFnVW2uqttV1b7DKAgAALieC4bcDgBgLA0cdlbVg6rqa0m2J/lMkrv12w+pqs9W1YOHUyIAAKx5Z6b7vbvNsL8lObdvBwCwZg0UdlbVA5KcluSiJM/JpLmDWmsXJTkvyWOGUSAAAKx12046bleSJ/c3pwaeE7ef0rcDAFizBu3Z+VdJPthau2eSf5lm/0eT3GHgqgAAgJ+w7aTjTktyfLqOBZNtT3J8vx8AYE0bNOz82SRvnmX
/hUluNOC5AQCAafSB5pYkxyY5ob8+UtAJANDZMOBxlyeZbUGimyf5wYDnBgAAZtAPVT9j1HUAAKxEg/bsPD3Jo6rqemFpVR2W5HFJ/mcxhQEAAAAALMSgYeefJzkiySeTPD7dpOi/VFXPS/LFdAsWPWcoFQIAAAAAzEO1NnUxx3keWPUzSV6cbp6gmrTrjCRPaq19ddHVrUBVtTHJjiSbWms7R10PAAAAAKwmS5mvDTpnZ1prX05yv6o6MMlR6XqJfru19v1hFQcAAAAAMF8Dh50TWmuXpBvODgAAAAAwMgPN2VlVf1xV751l/7ur6omDlwUAAAAAsDCDLlD02CRfmWX/V5L8/oDnBgAAAABYsEHDzlskmW0Boq/1bQAAAAAAlsWgYefVSQ6bZf/mJLsHPDcAAAAAwIINGnZ+LMmjq2r/qTuqalOSx/RtAAAAAACWxaCrsT8nyQeSfK6qTk7y5X77zyZ5SrqenScstjgAAAAAgPkaKOxsrX28qh6Q5GVJXpyk9bsqydlJHtha++hwSgQAAAAAmNugPTvTWvvfqjoqyR3y48WIzkrymdZam/lIAAAAAIDhGzjsTJLW2u4kn+4vAAAAAAAjs6iws6puk+TmSQ5MN4T9J7TWXrOY8wMAAAAAzNdAYWdV3SLJfya5a6YJOXstibATAAAAAFgWg/bsfFmSn0u38vqZSS4ZVkEAAAAAAIMYNOz8hSR/21p7yTCLAQAAAAAY1LoBj7soyY5hFgIAAAAAsBiDhp3/luSRVbV+mMUAAAAAAAxq0GHs30iyPsnnq+qVSc5Nsmtqo9baaYuoDQAAAABg3qq1tvCDqnbPo1lrrY1dz8+q2phuCP+m1trOUdcDAAAAAKvJUuZrg/bsPHaYRQAAAAAALNZAYWdr7QPDLgQAAAAAYDEG7dmZJKmqvZLcMcmNkny4tXbRUKoCAAAAAFigQVdjT1X9cZILknwoyWlJbttvP6SqLqqq3x1OiQAAAAAAcxso7KyqxyQ5Ocl7kjw2SU3s63t3/l+S3xxCfQAAAAAA8zJoz86nJXl7a+2EJO+cZv+nk/zMwFUBAAAAACzQoGHnUUnePcv+i5McPOC5AQAAAAAWbNCw89Ikh8yy/zZJvjvguQEAAAAAFmzQsPNdSX6/qg6YuqOqfibJ45K8YxF1AQAAAAAsSLXWFn5Q1Y2TfDzdwkTvTPL7Sf4zyfokv55ulfa79osVjZWq2phkR5JNrbWdo64HAAAAAFaTpczXBurZ2Vo7P8md0q3G/hvpQs/fTvKAJG9IcvdxDDoBAAAAgJVroJ6d1ztJ1Q3TBaffb63tXvQJVzA9OwEAAABgcEuZr20Yxklaa98fxnkAAAAAAAY1UNhZVX81j2attfbXg5wfAAAAAGChBl2gaLah6i3dHJ6ttbZ+0MJWKsPYAQAAAGBwK3GBonVTL+l6id4iyT8m+VSSGw2xTgAAAACAWQ0Udk6ntba7tXZ2a+3/JflmkpcM69wAAAAAAHMZWtg5xQeT/OoSnRsAAAAA4HqWKuy8c5LZ5vUEAAAAABiqQVdj/50Zdh2Q5F5JHprkPwasCQAAAABgwQYKO5OcMsu+i5KclOS5A54bAAAAAGDBBg07j5xmW0tySWvtskXUAwAAAAAwkIHCztbaOcMuBAAAAABgMZZqgSIAAAAAgGU1r56dVbU73TD1hWittUGHyQMAAAAALMh8w8jnZuFhJwAAAADAsplv2HlaknNaazuWshgAAAAAgEHNd87OzyY5buJGVf1fVd13aUoCAAAAAFi4+YadVyTZZ9LtY5IcOvRqAAAAAAAGNN9h7J9P8idVtSvJxFD2u1TVlbMd1Fo7bTHFAQAAAADMV7U297pDVXXnJKcmuWm/qSWpOQ5rrbX1iytv5amqjekC302ttZ2jrgcAAAAAVpOlzNfm1bOztfapqjoqyS3SDV8/I8nfJHnfMIsBAAAAABjUfIexp7V2bZKvJ/l6Vb06yX+31j6+ZJUBAAA
AACzAfBco+gmttceMOuisqoOq6nVVtbOqLq2qV1TVfnMcc0ZVtSmXf1uumgEAAACApTPvnp1TVdWBSX4ryc2THJjrz+HZWmuPXURtc3ldks1JfjHJHkleleTlSU6Y47h/T/JXk25fviTVAQAAAADLaqCws6p+Kd2CRfsm2Znkkmmazb3y0YCq6tZJfjnJXVprn+q3/VGSd1XV/2utnT/L4Ze31r67VLUBAAAAAKMx0DD2JC9K8t0kt2utHdBaO3Kay82HWOdUP5/k0omgs/e+JLuT3G2OYx9RVRdV1Zeq6vlVtc9sjatqr6raOHFJsv/iSgcAAAAAlsKgw9iPSvKnrbUvDrOYBTgsyfcmb2itXVtVF/f7ZvL6JOckOT/JbZP8XZJbJnnoLMc8M8mzFlUtAAAAALDkBg07v5kl6OFYVSclecYczW496Plbay+fdPOLVXVBkvdX1S1aa2fNcNjzk/zDpNv7J9k+aA0AAAAAwNIYNOz8iyT/UlWvb61tG2I9L0pyyhxtvp1uCP2NJm+sqg1JDur3zdfEivJHJZk27GytXZXkqkn3s4DTAwAAAADLZdCw875Jvp/kq1X1v0nOTbJrSpvWWnvyQk7aWvt+f95ZVdVHkxxQVXdqrX2633yfdHOQfnzmI6/n9v31BQupEwAAAABYeaq1hS+aXlW759GstdbWL7ykedfw7iSHJnlCkj2SvCrJp1prJ/T7D0/y/iS/01r7RFXdIskJSd6V5Afp5uz8xyTbW2v3XsD9bkyyI8mm1trOIX5LAAAAADD2ljJfG6hnZ2tt0FXch+kRSf45XaC5O8lbk/zxpP17pFt8aGK19auT3C/JU5Lsm6436luTPG95ygUAAAAAltJAPTvXMj07AQAAAGBwS5mvrYQemgAAAAAAizbvYexV9YUFnru11m63wGMAAAAAAAaykDk7L05izDsAAAAAsCLNO+xsrR2zhHUAAAAAACyKOTsBAAAAgLEg7AQAAAAAxoKwEwAAAAAYC8JOAAAAAGAsCDsBAAAAgLEg7AQAAAAAxoKwEwAAAAAYCxvm06iqzk6yO8mtWmvX9LfbHIe11totFlsgAAAAAMB8zCvsTPKBdOHm7im3AQAAAABWhGpNZrkQVbUxyY4km1prO0ddDwAAAACsJkuZr5mzEwAAAAAYC/Ods/Neg5y8tfbBQY4DAAAAAFio+c7ZeUZ+co7Oyvzm7Fy/0IIAAAAAAAYx37Dz2Cm390rygiT7JHl5kq/322+V5HFJfpTk6cMoEAAAAABgPgZaoKiq/iHJPZPcq7V25ZR9+6Rbrf2DrbWnDaXKFcQCRQAAAAAwuJW4QNEjkrx2atCZJK21y5O8NskjF1MYAAAAAMBCDBp27ptk8yz7N6cb4g4AAAAAsCwGDTvfl+TJVfXQqTuq6teTPLlvAwAAAACwLAads/PwJP+X5KgkFyT5Vr/rFklunOSsJPdprW0fUp0rhjk7AQAAAGBwK27OztbaeUlul+RPknwpyaH95ctJnprkduMYdAIAAAAAK9dAPTvXMj07AQAAAGBwK65nJwAAAADASrNhPo2q6v8GOHdrrd13gOMAAAAAABZsXmFnuh6gCx3vXgtsDwAAAAAwsHmFna21Y5a4DgAAAACARTFnJwAAAAAwFuY7jH1aVXXvJMcluVm/6ZwkW1trH1hsYQAAAAAACzFQ2FlVeyZ5Q5IHp5ub89J+1wFJnlZV/5Xkt1pr1yy+RAAAAACAuQ06jP1ZSR6S5EVJNrfWDmqtHZTksCR/n+ShSf5qOCUCAAAAAMytWlvoIutJVZ2d5IzW2mNm2H9KkmNaa1sWVd0KVFUbk+xIsqm1tnPU9QAAAADAarKU+dqgPTs3J/n4LPs/nq6XJwAAAADAshg07Nye5JhZ9t+7bwMAAAAAsCwGDTtfneThVfVvVXXLqlpfVev6r/81ycOSnDK0KgEAAAAA5jDonJ3rk7wiye8kaUl297vWpVud/dVJHtta2z39GVY
vc3YCDEHVwUl+N8kj00178qMk/5PkpWntC6MsDQAAgKW1lPnaQGHndQdX3TbJrya5Wb/pnCTvamP8h6qwE2CRqn4pyVuS7LU79dYvbD7qipZ1h93me9++6167rjkkyT8k+dOM4T/MAAAAWNp8bcNiDu5DzbENNgEYsqq7J3l7kvc9+Lf//tTP3fhWf53kiCTZsOvaPO4T/3Xp0z/46qdWckWSvxhlqQAAAKw+i+rZmSRVtV+SA9MNX/8JrbXvLOrkK5CenQCLUPX+JAfe8Y9ed9LF+2x648TWSS3aUz70uvrjD7/x2nVpN0lr3x1BlQAAACyhpczXBlqgqKr2rqrnV9X3+sK2JTl7mgsAdKpuleQ+16xb/6KL99n0oomtU1u98s4PbFdv2GP9rlr3e8tcIQAAAKvcoMPYX5rkUUneluTMJJcMqyAAxtbdkuR+v/evF6Ufuj6dnXvvXx+56W3zsxee9as3Sp63bNUBAACw6g0adj40yX+01h4/zGIAGGsbkuR7+x50yFwNr96wR65Zv2HfpS8JAACAcTLQMPYkLclnhlkIAGPv20ny1A+/7sDZGq3fvSu3veCbuXLDnt9enrIAAAAYF4OGnW9Pcr9hFgLA2PtAkrN+7xNvu2eS7en+cXY99//mx9qNL7soh132g79Z1uq4vqpK1aGpulmq9hl1OQAAAHMZNOz86yQ3r6qXV9WdquqGVXXQ1MswCwVglWttd5K/W5f2G6849Tnvndg6ucmtvnd2e957/6Uu2O/gz+979RWfWv4iSZJU7ZOqP07ylSTfTbcQ4cWpek2q7jTS2gAAAGZRrU3bsWb2g6p2T7o54wlaa+sHKWolq6qN6Vag39Ra2znqegBWlapK8oIk/+97+x74lRfc+1GHffVGRx608cof5iFfPj0P+soH2uV77LXtwCt/eOe0dvGoy12Tqg5J8t4kt92d+q+tt7rnl75zwGE3uPt3vnjQHc//2i9WctMkj0trrxpxpQAAwCq1lPnaoGHnszNLyDmhtfacAWpa0YSdAENQ9ZAkT05y74lNV67f48I9du96yfq2+8Vp7YejK24N68LoDyS51V/+4hNOeu0df+2pSY6Y2L1u967tW0958ldu/f1t90ty/7T2/lGVCgAArF4rKuysqj2S3DrJxa217cMsZjUQdgIMUdVNkhyW5PIkX0tru0Zc0dpWdUyS0//9Lg/567+5z2P/YmLrpBat2u586iWP/NbBV+z8TlozfzcAALBgS5mvDTJn5+4kn07y0GEWAsAa1Nq5ae2Tae3Lgs4V4XEt+drfHPu7j+lv15T91Wpd/u6YRx+U5L6pOmqZ6wMAAJjVgsPO1v0xek6SvYZfDgAwQrfeduCNv5mqI3L9oHNCnXHknQ7uv/7pZaoLAABgXgZdjf0lSX7fiusAMFZ2XbFhr/3marThx51wd8/WDgAAYLltGPC49UmuSnJWVZ2aZFuSK6a0aa21f1xEbQDA8vrszS/e/qD1u3dl17r1MzY69qxPpSW7K/niMtYGAAAwp0FXY59PT47WWpv5L6VVygJFAIytqjsm+fSf/sofX/yW297/wEwzlH2va69u737lH+7acsn571zXmvm7AQCABVvKfG3Qnp1HDrMIAGAFaO0zqTr1+e/55wdctM8BOf2ou7ZMCjz3v/JH7eT//vu62aXf3b0ued4IKwUAAJjWQD0753XiqgNba5csyclHSM9OAMZa1T5J3pzkuC/f6Mir33abY/f80V43yM9ceFYe8qXT2567rrlqQ9v94LT23lGXCgAArE5Lma8NNeysqr2SPDDJI5L8cmtt76GdfIUQdgIw9qrWJfnV3cmTWtWx61rb86r1e3xvj927Xra+7X5ZWjt/1CUCAACr10ocxn6dqqok900XcD4kycYk30/y+sWeGwAYgdZ2J/nvdcl/J0mqau9rr16aoSAAAABDNHDYWVV3Shdw/maSw5K0JG9M8s9JPtaWanw8ALC8fKYDAACrxIL
Czqq6ebqA8xFJfirJeUlel+QTSd6U5K2ttY8Ou0gAAAAAgLnMO+ysqo8muWuSi5KcmuT3Wmsf6vfdYmnKAwAAAACYn4X07LxbkrOT/EmSra21a5emJAAAAACAhVu3gLZ/mOSCJP+V5LtV9bKqOrZfoAgAAAAAYKTmHXa21l7aWrtnklskOTnJ0Unen27ezuemW6DIAgYAAAAAwEjUYhZYnbQi+28k2ZzkwiTvTPKOJO9rrV05jCJXkqramGRHkk2ttZ2jrgcAAAAAVpOlzNcWFXZed5KqdUnuk+SRSR6SZP8kl7fW9lv0yVcYYScAAAAADG4p87WFzNk5o9ba7tba+1prj05yaJLfSjfEHQAAAABgWQylZ+daomcnAAAAAAxuxffsBAAAAAAYNWEnAAAAADAWhJ0AAAAAwFgQdgIAAAAAY0HYCQAAAACMBWEnAAAAADAWNoy6AABgBlVHJXlEkhsnuSLJ+5O8K63tGmldAAAAK5SwEwBWmqqDk7wyyQNbculle+3zvWpt4/5XX/HklpxTVX+Q1t416jIBAABWGmEnAKwkVQckOSPJYe+49b3++em/8scPuXKPvX86SW57wTfyjA+8+tB7nPP5d1bVQ9Pa20dZKgAAwEpTrbVR17CqVNXGJDuSbGqt7Rx1PQCMmaoXJ3nU0371KX/51p+734sntk7sXrd7V3vp206q+571yR/tsfvaQ9Paj0ZTKAAAwGCWMl+zQBEArBRV+yV59K6qf3nrz93v6RNbJzfZvW59Pe++v9fW79617zXr1p+w/EUCAACsXMJOAFg5jkmy8c/v/6SvJDkiU4LOCds3HVofudltc8kNNv7uchYHAACw0gk7AWDl2JQkp9/iLnPOqX3hfgclyYFLXRAAAMBqIuwEgJXj4iQ57mtnztnwiB3fS0tdtOQVAQAArCLCTgBYOU5PcvGfn/6K2yXZnmTaVQRv8YNz2922fzkHXbHj35a1OgAAgBVO2AkAK0VrVyb59/WtPf5xHz/tJRNbJzfZ89pr2nP+99/qqvV77Nhj9663LH+RAAAAK1e1Nm2nEWZQVRuT7EiyqbW2c9T1ADBmqvZJ8r4kP/fBLXf4r6f+2tPu+4N9D7jxut27cu+zP5M/OfM/r77N975d61u7f1o7Y8TVAgAALNhS5mvCzgUSdgKw5Kr2S/KPSX67JeuuWr/Hxeta22/P3dfu25LPVvKktPbRUZcJAAAwCGHnCiLsBGDZVB2S5OFJbpzkiiTvT/Lx+PAGAABWsaXM1zYM82QAwBC1dlGSl466DAAAgNXCAkUAAAAAwFgQdgIAAAAAY0HYCQAAAACMBWEnAAAAADAWhJ0AAAAAwFgQdgIAAAAAY0HYCQAAAACMhQ2jLgBYwaoOTPJbSY5McnWSjyZ5d1rbNdK6AAAAAKaxant2VtWfV9VHquryqrp0nsdUVT23qi6oqiuq6n1V9VNLXCqsPlV7purFSc5LcnKSByX5nSTvTHJWqn59hNUBAAAATGvVhp1J9kzyliT/uoBjnp7kj5M8IcndkvwoyXurau/hlwerVNWGJKele588/7v7HXyzLc/479/f8oz/fvo/3eM3n7A7+VySU1P1qJHWCQAAADBFtdZGXcOiVNWjk5zcWjtgjnaV5PwkL2qt/X2/bVOSC5M8urX2xnne38YkO5Jsaq3tXETpsDJV/VGSf0zyq1ue8d/7JXlxkiOu29/a9ve+8klfv+VF37lXki1p7fzRFAoAAACsRkuZr63mnp0LdWSSw5K8b2JDa21Hko8n+fmZDqqqvapq48Qlyf5LXimMSvdPgScleWsfdJ6a5PApbQ5/2CNecJ9ra93uJL+3/EUCAAAATG8thZ2H9dcXTtl+4aR903lmuqR54rJ9+KXBinGbJLe8av0ep6Tr0ZkkNaVN7dx7v7zrVve8tiXm7gQAAABWjBUVdlbVSVXV5rjcapnLen6STZMuR8zeHFa1A5Pk+cc85obpXutTg84Jte2Azft
evX6PzctWGQAAAMAcNoy6gClelOSUOdp8e8Bzf7e/PjTJBZO2H5puwZVptdauSnLVxO1ulC+MrUuT5ODLd8z5T4XNl/0gV+yx1xV7LXlJAAAAAPOzonp2tta+31r72hyXqwc8/dnpAs/7Tmzo5+C8W5KPDqF8GAdfSfKNh37p/+42W6P9r/pRfvXrH8q5mw47fZnqAgAAAJjTigo7F6KqblpVt09y0yTrq+r2/WW/SW2+VlUPSZLWLTt/cpK/qKoHVtXPJXlNuhXa37bc9cOK1NruJC+98WXfv/e9v/2p7ydp07TJM844JXvtuiaHXXbRXy57jQAAAAAzWLVhZ5LnJvlskuck2a//+rNJ7jypzS3TzbM54QVJXpLk5Uk+2R/3y621K5ejYFglXlrJ+1556nM3PfGjb8mBl++4LvD8mQvPykvfflIe+bl3570/dfeX3vBHl5w7ykIBAAAAJquuwyPz1Q9935FkU2tt56jrgSVRtVeSf9xd9bhr1m3YcM4Bm7P3tVflpjsuzIX7HbTrg0fe8cUP+8L/Pm3UZQIAAACrz1Lma8LOBRJ2sqZU3fDaWveIsw86/J479t5vr/M23ujjl9xg/xc8+tPvHHTuXAAAAGCNE3auIMJOAAAAABjcUuZrq3nOTgAAAACA6wg7AQAAAICxIOwEAAAAAMaCsBMAAAAAGAvCTgAAAABgLAg7AQAAAICxIOwEAAAAAMaCsBMAAAAAGAvCTgAAAABgLGwYdQEAY6vqJkl+Lkkl+Wpa+/aIKwIAAICxpmcnwLBV3TVV70xyTpKtSf47yVmp+p9U3Xu0xQEAAMD40rMTYJiqHpDk1CTfuHbd+if80QP+9JJLbrDxho/91NsOv9+3PnH/St6fqkeltdeNulQAAAAYN9VaG3UNq0pVbUyyI8mm1trOUdcDrCBVN0vy1STvvd0fv+ENO26w/4uSHDGxe93uXdvf+8o//OZP/eDco5PcMa19cVSlAgAAwKgsZb5mGDvA8DwxydW/8IRXvHnHDfZ/Y5LDJ+/cvW794b/ymJccc8WGPXck+aORVAgAAABjTNgJMDyP3l31mvM2HfqC/nZN2V/Xrt+QV975QRta8ohU7bncBQIAAMA4E3YCDEPVHkkOPePIO12ebuj61KDzupafvfGtNlWyT5KDlq0+AAAAWAOEnQDDsSvJrqvX73HYXA33vfryiS+vXNKKAAAAYI0RdgIMQ2u7k3zwbud+6Y5zNf21r30oP9zzBmelm4wZAAAAGBJhJ8DwvPTAKy/7uWPO+uT3k7TpGtz2gm+0+5z1iex97dUvTGvTtgEAAAAGI+wEGJ63Jfnf/zjtefsf99Uzs273ruvCzGq7c59vfaK9+s3PqktvsPEbG3bvevXoygQAAIDxVDoWLUxVbUw39HRTa23nqOsBVpiqfZO8LsmDzt//kF1n3PxO61tVfv6cL+Tml5yfC/c76IuH/vDie6e1S0ZdKgAAAIzCUuZrws4FEnYC81J1l92pJ/5wrxv8wrXrNuxx+R57f+vgyy993g2uvfpMw9cBAABYy4SdK4iwEwAAAAAGt5T5mjk7AQAAAICxIOwEAAAAAMaCsBMAAAAAGAvCTgAAAABgLAg7AQAAAICxIOwEAAAAAMaCsBMAAAAAGAvCTgAAAABgLAg7AQAAAICxsGHUBcDIVVWS/ZNUkp1prY24IgAAAAAGoGcna1fVIan68yTbkuxIcmmSc1L156m64ShLAwAAAGDhSie2hamqjemCsU2ttZ2jrocBVf1skvcmOXB36g3v+emf337JDTYecM9zPnfUTS/97rGVXJLkl9PaF0dcKQAAAMBYWcp8zTB21p6qg9MFnd8/4Tf+5q8+suV2z05yxMTuwy676Pz3vPIPrzrgyh++J1W3S2sXjapUAAAAAObPMHbWosclOeiE3/ibl3xky+3+Pcnhk3d+d/9DNt//sS/dsqvWHZLk90ZSIQAAAAALJuxkbekWI3r87tSb+h6dSbc
w0U+0+t5+B+W/b3X0NS15Qn8MAAAAACucsJO1Zp8kW973U3fbnm7o+kxBZr3/qLvsW8nNkuy7bNUBAAAAMDBhJ2tNS5JLbrD/AQs9BgAAAICVTdjJWnNFkm/f45zP/9RcDe999mdyxYY9L0hy+dKXBQAAAMBiCTtZW1prSf7tiB3fO+aIHRdekBl6bR6286L2a1/9YPbYde2L+2MAAAAAWOGEnaxFr6jku+961R9fu3nn95Mpgefmnd9vp7zlWbWr1l+0oe3+99GUCAAAAMBClU5rC1NVG5PsSLKptbZz1PUwoKpbJfmf3VWb3/3T97j6/UfddZ8kOfrsz+a4r30o165bf9E+1151r7T21RFXCgAAADBWljJfE3YukLBzjFQdmOR3W/KESo5Kkss37HXenruuOXlD2/2KtHbJiCsEAAAAGDvCzhVE2DmmqvZOkrR25YgrAQAAABhrS5mvbRjmyWDVEnICAAAArHoWKAIAAAAAxoKwEwAAAAAYC8JOAAAAAGAsCDsBAAAAgLFggSIYlqo9kvx0kr2TnJ/WLhhxRQAAAABrip6dsFhVB6bq2UnOSfKlJJ9Kcn6q3pOq+4+0NgAAAIA1RM9OWIyqGyf5vyRH7E795yl3esBXv33w4fvc51uf3HTstz91n0rem6oT09rfjbpUAAAAgHFXrbVR17CqVNXGJDuSbGqt7Rx1PYxQ1bokH09y2B894E9Peudt7n1ikiOu29/a9je//hkfvev2rzwsya+ntdNGVCkAAADAirGU+Zph7DC4+ya58z/d4zdf/s7b3PslSQ7/ib1Vhz/8hL87/rv7HfSFJM8YRYEAAAAAa4mwEwb32JZ86R/u+Yjf72/XlP2Vqvz1fR934yR3TdXPLXN9AAAAAGuKsBMGd4tzDth8TqqOyPWDzgn1sZv83CET7ZepLgAAAIA1SdgJg7v26vV7bJyr0Z67rpn48prZ2gEAAACwOMJOGNzHjrzkvNvuee3sGeb9v/mx7E52JfnM8pQFAAAAsDYJO2Fw/7bH7l2bfucz/31pkjZdg32uvqI99pNvuzap/0prFyxveQAAAABri7ATBtXa15O87M9Of+X+v/H5/8m63bt+IvC80WU/aK869Tm1+bKLrlmX9uzRFAkAAACwdlRr03ZIYwZVtTHJjiSbWms7R13P2KjanOROSTYk+XZa+8KIK5qfqj2SvCzJY87f/5Bd77jNvdf/cM8b5NbfOzu/9M2PZXfVZXvuuvZX0tqHR10qAAAAwEqwlPmasHOBhJ1DVnWbJM9O8pB0QeeETyb5u7T21lGUtSBVleQuu1NPunr9hl9KsvfV6/e4cJ9rrvyXDW33a9LapSOuEAAAAGDFEHauIMLOIaq6R5L3JPnetbXun/7ogU+/YPumQw989KffefBDv/x/x1byi0n+PK397YgrBQAAAGBIhJ0riLBzSKo2JTkryZePfvx/vOzcAw77uyRHXLe/te1vev2JH7nb9i8/PMmvpLX3jKhSAAAAAIZoKfM1CxQxKr+d5IBHPvyvX3vuAYf9Z5LDf2Jv1eG/ccJJD/vBDTZ+M8lTR1EgAAAAAKuLsJNRedTu1Ns/dOQdntXfrin7K1X5+3v99sFJ7p+qGy9zfQAAAACsMsJORuWILx12ix3phq5PDTon1Oc33/KgifbLUxYAAAAAq5Wwk1G58pp1G244V6N9r7584ssrlrYcAAAAAFY7YSejcvrPXHjWXdbt3jVrowd89cxcvX7DxUm+tjxlAQAAALBaCTsZlZfuveuaQx/xuXdfkqRN1+Bml5zffv1L72/rd+9+aVq7ZpnrAwAAAGCVEXYyGq19KskrnvO/L9v46E+9I3tde3WbtC93/84X2uvf8Ge1u+qC9W33P46uUAAAAABWi2pt2k51zKCqNibZkWRTa23nqOtZ1ao2JPmnJE+8dO/9dr//FndZd/X6PXKH87+eW110Ti7Ze/+zDrzysnuntfNGXSoAAAAAw7GU+Zqwc4G
EnUug6qd2VT3xsr32vf+uWneDqzbsec5Bl+84ae9d17wvre0edXkAAAAADI+wcwURdgIAAADA4JYyXzNnJwAAAAAwFoSdAAAAAMBYEHYCAAAAAGNB2AkAAAAAjAVhJwAAAAAwFoSdAAAAAMBYEHYCAAAAAGNB2AkAAAAAjAVhJwAAAAAwFoSdAAAAAMBYEHYCAAAAAGNB2AkAAAAAjAVhJwAAAAAwFoSdAAAAAMBYEHYCAAAAAGNB2AkAAAAAjAVhJwAAAAAwFoSdAAAAAMBYWLVhZ1X9eVV9pKour6pL53nMKVXVplzes8SlAgAAAADLYMOoC1iEPZO8JclHkzx2Ace9J8ljJt2+aphFAQAAAACjsWrDztbas5Kkqh69wEOvaq19d/gVAQAAAACjtGqHsS/CMVX1var6elX9a1UdPFvjqtqrqjZOXJLsv0x1AgAAAAALsNbCzvck+Z0k903yjCT3TvLuqlo/yzHPTLJj0mX7UhcJAAAAACzcigo7q+qkaRYQmnq51aDnb629sbX2jtbaF1trb0vya0nukuSYWQ57fpJNky5HDHr/AAAAAMDSWWlzdr4oySlztPn2sO6stfbtqrooyVFJ3j9Dm6syaRGjqhrW3QMAAAAAQ7Siws7W2veTfH+57q+qjkhycJILlus+AQAAAIClsaKGsS9EVd20qm6f5KZJ1lfV7fvLfpPafK2qHtJ/vV9VvbCq7l5VW6rqvknenuRbSd47iu8BAAAAABieFdWzc4Gem+RRk25/tr8+NskZ/de3TDfPZpLsSnLb/pgDkpyf5H+S/GU/VB0AAAAAWMWqtTbqGlaVqtqYblX2Ta21naOuBwAAAABWk6XM11btMHYAAAAAgMmEnQAAAADAWBB2AgAAAABjQdgJAAAAAIwFYScAAAAAMBaEnQAAAADAWBB2AgAAAABjQdgJAAAAAIwFYScAAAAAMBaEnQAAAADAWBB2AgAAAABjQdgJAAAAAIwFYScAAAAAMBaEnQAAAADAWBB2AgAAAABjQdgJAAAAAIwFYScAAAAAMBaEnQAAAADAWBB2AgAAAABjQdgJAAAAAIwFYScAAAAAMBaEnQAAAADAWBB2AgAAAABjQdgJAAAAAIwFYScAAAAAMBaEnQAAAADAWBB2AgAAAABjQdgJAAAAAIwFYScAAAAAMBY2jLoAVpCqQ5LcI8neSc5N8vG0tnu0RQEAAADA/Ag7Saq2JPnrJA9PsuekPd9M1T8m+be01kZRGgAAAADMl7Bzrau6TZLTk1x9ba37yyc/4E/P/uqNjtz/EZ97936P+dQ7fmFd2kuT3CFVjxd4AgAAALCSlfxqYapqY5IdSTa11naOup5FqdqQ5GtJLn/g7/zDi76w+aefl+SISS22/9M7XvC2B371g3+Y5Alp7WUjqRMAAACAsbGU+ZoFita2ByS5xbPu9/hXf2HzT78qyeFT9h/+xw98+pO2HbD5I0memqpa/hIBAAAAYH6EnWvb77Tk46++0wOe0t+eGmZWkvzl/Z94VJJbJrnrMtYGAAAAAAsi7FzbDj/ngM3fTzd0faZem/XFw466Uf/1ETO0AQAAAICRE3aubVfuWrfukLka7XfV5RNfXrG05QAAAADA4ISda9v/bbnkgttPCjOn9cCvfjC7qq5O8rHlKQsAAAAAFk7Yubb9+7q2e48nf/gNlyVp0zU45EeXtMd86h27quX1ae3iZa4PAAAAAOZN2LmWtXZeJc963Cf/a/8//cCra+OVP/yJwPN253+9veENf1b7X335znVpfzWqMgEAAABgPqq1aTv0MYOq2phkR5JNrbWdo65n0aoqyTN3p5571YY91p1+8zvXD/faJ7f+3tn5uQvPymV77nPe/ldffp+09o1RlwoAAADA6reU+Zqwc4HGLuycULV5V637/R177/fgXbVu32vXrz//oMt3vGivXde+K63tGnV5AAAAAIwHYecKMrZhJwAAAAAsg6XM18zZCQAAAACMBWE
nAAAAADAWhJ0AAAAAwFgQdgIAAAAAY0HYCQAAAACMBWEnAAAAADAWhJ0AAAAAwFgQdgIAAAAAY0HYCQAAAACMBWEnAAAAADAWhJ0AAAAAwFgQdgIAAAAAY0HYCQAAAACMBWEnAAAAADAWhJ0AAAAAwFgQdgIAAAAAY0HYCQAAAACMBWEnAAAAADAWhJ0AAAAAwFgQdgIAAAAAY2HDqAtYxfavqlHXAAAAAACrzf5LdWJh58JNPBnbR1oFAAAAAKxu+yfZOcwTVmttmOcbe9V157xxkstGXcsY2T9deHxEPK6wknhvwsrkvQkrk/cmrDzel7AyTbw3b5XkG23I4aSenQvUPwHnjbqOcTJpOoDLWmtDTfOBwXlvwsrkvQkrk/cmrDzel7AyTXpvXjDsoDOxQBEAAAAAMCaEnQAAAADAWBB2shJcleQ5/TWwcnhvwsrkvQkrk/cmrDzel7AyLel70wJFAAAAAMBY0LMTAAAAABgLwk4AAAAAYCwIOwEAAACAsSDsBAAAAADGgrCTZVdVm6vqpKo6vaouq6pWVccs4Phn98dMvVy5dFXD+Fvse7M/x+FV9eaqurSqdlbV26vq5ktTMawdVXVAVb28qr5fVT/q36d3nOexp8zwufm1pa4bxkFV7VVVf1dV51fVFVX18ar6xXke63MRlsCg70t/S8LSqqr9quo5VfWeqrq4f389egHHD/w772QbFnoADMEtkzwjyTeTfDHJzw94nicm+eGk27sWWResdYt6b1bVfklOT7Ipyd8muSbJU5N8oKpu31r7wXDLhbWhqtYl2ZrkdklemOSiJH+Q5IyqulNr7ZvzOM1VSX5vyrYdQy0UxtcpSY5PcnK6z8hHJ3lXVR3bWvvQTAf5XIQldUoGeF9O4m9JWBqHJPmrJN9J8vkkx8z3wCH9zptE2MlofDrJwa21i6vq+CRvGfA8p7bWLhpiXbDWLfa9+QdJfirJXVtrn0ySqnp3ki8leVqSPxtmsbCGHJ/kHkke1lo7NUmq6s1JvpHkOUlOmMc5rm2t/efSlQjjqarumuQ3k/xpa+3v+22vSffZ9oJ0782Z+FyEJbDI9+UEf0vC0rggyebW2ner6s5JPrmAY4fxO28Sw9gZgdbaZa21i4dwqqqqjVVVQzgXrHlDeG8en+STE3/Q9ef8WpL3J3n4YuuDNez4JBcmOW1iQ2vt+0nenORBVbXXfE5SVeurauPSlAhj6/h0Pb5ePrGhtXZlklck+fmquskcx/pchOFbzPtygr8lYQm01q5qrX13wMOH8jtvIuxkdft2uiF4l1XVf1bVoaMuCNaqfsjBbZN8aprdn0hyi6raf3mrgrFxhySfaa3tnrL9E0n2SfLT8zjHPkl2JtnRz5/0L/0QW2B2d0jyjdbazinbP9Ff3366g3wuwpIa6H05hb8lYeUZxu+8SQxjZ3W6JMk/J/loujnIjk7ypCR3rao7T/OhByy9g5LslW7YwlQT226c5OvLVhGMj81JPjjN9snvrS/OcvwF6Yb1fSbdP7p/Od3w2ttV1TGttWuHWCuMm82Z+7NtOj4XYekM+r5M/C0JK9lif+e9jrCTRen/a73nPJtf1Vpri73P1tqLp2x6a1V9Isnr0v3xdtJi7wNWuxG8N28wca5p9l05pQ2sWQO+N2+QRby3WmvPnLLpjVX1jSR/k2640BvnWQ+sRYO+/3wuwtIZ+HPR35Kwoi3qd97JDGNnse6V5Ip5Xm65VEW01l6f5LtJ7rdU9wGrzHK/N6/or6ebR2XvKW1gLRvkvXlFhv/e+scku+NzE+Yy6PvP5yIsnaF+LvpbElaMob239exksb6W5DHzbDvdUINhOjfdkCFg+d+bF6f7L9zmafZNbDt/CPcDq90g780LMuT3Vmvtiqr6QXxuwlwuSHL4NNvnev/5XISlM+j7cjb+loTRG9rvvMJOFqVfZeuUUdfRr6K3JclnR1wKrAjL/d5sre2uqi8mufM0u++W5NuttcuWqx5
YqQZ8b34uydFVtW7KhO13S3J5km8stI5+YZRDknx/ocfCGvO5JMdW1cYpc/ndbdL+6/G5CEvqcxngfTkTf0vCivG5DOl3XsPYWdGq6qZVdasp2244TdMnJrlhkvcsS2Gwxk333kxyapK7VNWdJ7W7ZZL7JHnLctYHY+bUJIcmeejEhqo6JMnDkryztXbVpO23qKpbTLq99wwrPv9lkorPTZjLqUnWJ/n9iQ1VtVe6Htofb62d22/zuQjLZ+D3pb8lYWWoqs1Vdauq2mPS5nn/zjvn+YewXgwsWFX9Rf/lzyT5zSSvTHJ2krTWnjep3RlJ7t1aq0nbLk/ypnSrcF2Z5J79OT6f5Bdaa5cvw7cAY2mR78390/1HfP8kf5/kmiR/ku6X0du31vQggwFU1fokH0rys0lemOSidIso3DTJXVprX5/UdluStNa29Le3pHtfviHdEPok+aUkv5ruj7rjpvznHJiiqt6c5CHp5rr9VpJHJblrkvu21j7YtzkjPhdh2SzifelvSVhiVfWHSQ5It3r6E5Oclh/3nH5Ja21HVZ2S7n17ZGttW3/cvH/nnbMGYSejUFUzvvCmfBidket/QP17knskuUm6iWrPSfLWJH9jOBAszmLem/32I9L90nn/dKMHzkjy1Nbat5aiXlgrqurAdL/0PTjdSpSfTPL/WmufmtJuW/ITYecBSV6S5O7pfuFcn+6Pwtcl+fvW2jXLUT+sZlW1d5K/TvLIJAcm+UKSv2ytvXdSmzPicxGWzaDvS39LwtLrfx+92Qy7j2ytbZsu7OyPndfvvHPWIOwEAAAAAMaBOTsBAAAAgLEg7AQAAAAAxoKwEwAAAAAYC8JOAAAAAGAsCDsBAAAA+P/t3X2wp2Vdx/H3ZwFzUGPxMa0GDBktn8oa04YMFguxicaQnBElEnychqdSIkU2K54MNWXULRAESmQQUTHcdFo2xLGEVARS8LirICEIyyrCosC3P67rB7+5OU973D179tf7NXPmPvd9X/f1cP9+f/zmO9d1faWJYLBTkiRJkiRJ0kQw2ClJkiRJkiRpIhjslCRJkiRJkjQRDHZKkiRJkiRJmggGOyVJkpawJCuT1Lbux9aWZMckpya5MckDSS7u1yvJym3bO0mSJG0vDHZKkiQtkiSH9uDd6G9TkpuTrE5yRJLHbOs+jiTZuQda955n+b37mF6+wCZfA7wZuBD4E+DdC6xnu5dkrySXJvlu/458J8mnkrxyW/dNkiRpqdtxW3dAkiTp/6G3A+uAnYCfA/YG3gMck+SAqrp6rOzfAicvdgeBnYET+v+XLUJ7K4DvVtXRi9DWkpXkIOCjwFeAfwA2AE8FXgS8FviXbdY5SZKk7YDBTkmSpMV3aVVdOXZ+UpIVwCXAJ5P8clXdA1BV9wH3zVZZkmXAI6pq01br8db3RODObd2JJWAlcB3wgqr68fiNJE9crE4kCfDI0fdQkiRpe+EydkmSpCWgqv4d+BtgN+BVo+vT7dnZl4ufnuTgJNcC9wIv6fd+PsmHknwvyb1Jrk3ymmF7SR7Z676+L5X+3yQXJdkjye7Abb3oCWPL7lduzphGfU/ytCRnJ7kzycYkZyXZuZfZvY9vH+CZY23tPUOdZydZP1Nb01x/VZKrktyT5I4k5yf5xUGZy5Jck+RXkqxJcndfQv6WzXlvY2WWJTmqv/tN/bNYlWTXeby2PYAvDQOdAFV166Avy5IcmeRrvZ3bknwmyW+MldkxyfFJpvr3YX2SE5P8zKCu9UkuSbJfkiuBe4DX93vLk7yn76d6b5JvJjm2B9klSZKWFH+gSJIkLR3n9uPvzaPsCtq+lh8FjgTWJ3kS8EXgxcDp/fo3gTOTHDV6MMkOtFmkJwBXAX9OWzK9C/AsWqDzjb34x4FX97+LFjiuC4DHAMf1/w/loSXyt/W6vw7cNNbW/yywrQcleStwDnADcAxtq4B9gf9IsnxQfFfgM8BXae/j68ApSfYfq2+u9zayCngncAX
tMzgLOBhYnWSnObr9bWDfJL8wjyGe2cd0I3AsbbuDTcALxsqcAbwD+G/gaGAt7XM4f5r6ng58BPhs7/dXelB6LS0Afw5wRB/XScC75tFHSZKkReUydkmSpCWiqm5KspE2u28uTweeXVXXjS4kOQPYoV+/vV/+YJKPACuTrOrLkg+hBf2OqarxREAnJ0lVVZILgQ8AV1fVeT/l0L5cVYeN9fNxwGHAsVX1I+C8JIcD92+BtkZt7Ab8NfC2qjpx7PpFwJeBNwEnjj3yFOCQqjq3lzuTFng8DLi0l5n1vfXn9gIOBw6uqgf310yyhhZMPYjZ9908hRbEnEpyBfB54N+AL1TVA2P17UMLGr+3qo4ce/60sb48l5bs6Yyqem2///4ktwJ/kWSfqloz9uzTgJdU1eqxdt5G+z7+WlXd0C+vSnIz8OYkp1XVjbOMR5IkaVE5s1OSJGlpuYs2C3IuaweBzgAHAp/qp48f/QGrabMPn9eLHwh8H3jfsNKqethS8C3gg4Pzy4HHJfnZrdDWyB/RfuteMHgXt9Bmeu4zKH8X8GCgtS8j/y/gl8bKzOe9HQRsBD47aPeq3saw3WE9H6JtSXAZsBdwPO193ZDktwZ9KVpAd6a+vLQfhzMwT+vH3x9cXzce6Bwbz+XAhsF4PkcLrL9otvFIkiQtNmd2SpIkLS2PBm6ds1TL5j7uCcBy4HX9bzqjBDd7AN/oyY8Ww3cG5xv6cVfgB1upzT2B0AKb0/nJ4PymaQK9G4DnjJ3P573tSQssz/QZzplkqAccV/cl5L8OvAJ4A3BJkmf0vTv3AG6uqjtmqWo34AHaVgbj9d+S5M5+f9zwOwVtPM/hoT1chxYtaZIkSdJ8GOyUJElaIvo+jbswCE7NYJgle7Ri5zzgwzM8c/UCu/bTun+G61lAXTPNPN1hcL6sl91/hvbvGpxvqT4uowU6D57h/kxBw4epqrtpsyovT/J92l6h+zPz5ztjVfMsN13m9WW0PTxPneGZ6zezL5IkSVuVwU5JkqSl49X9OFxKPB+3AT8Edqiqz81Rdgr4zSQ7VdVwhuPI1ljOviVsoM1gHRrOUpyiBSrXVdWWCsjN571N0RJEXdH3R91SruzHJ4+1s1+Sx84yu/PbtGDlnowlfOqJrJb3+3OZAh49j++UJEnSkuCenZIkSUtAkhW0/RnXAf+8uc9X1f3Ax4ADkzxreD/JE8ZOPwY8HvizacqNZjLe3Y/LN7cvW9kUsEuSB5eXJ3ky8LJBuYtoszVPGBvTqHx6kqTNNZ/3dgFtlunx05TZcZos8MMy+85wa7T/5jfG+hIeymo/XV/+tR+PGhQ5ph8/PVtfuguAFybZb5p2lidx8oQkSVpS/HEiSZK0+PZP8gzab7EnASuA36XNtDugqjYtsN6/pCXA+c8k/wRcBzyWlpjoxf1/gHNomcXfleT5tKXSj+pl3g98oqruSXId8Iok1wN3ANdU1TUL7NuWcj4tY/nHk7wX2Bl4I2059SgBE1U11TOJnwTsnuRi2szXp9ICo/8I/P1mtj2f97Y2ySrguCS/Ssuk/hPa7MqDgCOBC2dp4xNJ1tESTU2N1f8HwJf6dapqTZJzgSOS7EnL9L4M+G1gDXB6VX01yYeB1/Ug61rg+bQM7RcPMrHP5J3AAbT9Qs+mJVp6FPBs4OXA7rSkTZIkSUuCwU5JkqTF945+/DEtiPg12uy7s6rqhwuttKq+14Nwb6dlI38TcDtwLXDsWLn7k7wUeCvwSlpm79uBz/e+jBxOyzz+buARtMzf2zTYWVW3J3kZLcP4qbSZsMfRgonPG5Q9uQdqj+ahGZA30gKQn1xA2/N6b1X1hiRXAa8HTgTuA9bT9lO9Yo5mDgf+EPhj4Cm02ZvfAv4OOGWQHOlPafuwHkYLSm6kLXf/wqC+bwGH0oK8t9ACwA/L4j7DmO9O8jvAX9GCtYfQkkpdT3unG+dTjyRJ0mLJw5NOSpIkSZIkSdL2xz07JUmSJEmSJE0
Eg52SJEmSJEmSJoLBTkmSJEmSJEkTwWCnJEmSJEmSpIlgsFOSJEmSJEnSRDDYKUmSJEmSJGkiGOyUJEmSJEmSNBEMdkqSJEmSJEmaCAY7JUmSJEmSJE0Eg52SJEmSJEmSJoLBTkmSJEmSJEkTwWCnJEmSJEmSpInwf3oL5dOrLCVrAAAAAElFTkSuQmCC", + "text/plain": [ + "
" + ] + }, + "metadata": { + "needs_background": "light" + }, + "output_type": "display_data" + } + ], + "source": [ + "plt.scatter(mean_train_influences, mean_arnoldi_train_influences)\n", + "plt.scatter(\n", + " mean_train_influences[:num_corrupted_idxs],\n", + " mean_arnoldi_train_influences[:num_corrupted_idxs],\n", + " facecolors=\"none\",\n", + " edgecolors=\"r\",\n", + " s=60,\n", + ")\n", + "plt.xlabel(\"Direct Influence Score\")\n", + "plt.ylabel(\"Arnoldi Influence Score\")\n", + "plt.title(\"Influence of training points - Arnoldi vs direct method\")\n", + "plt.show()" + ] + }, + { + "cell_type": "code", + "execution_count": 31, + "id": "395c9f40-d388-4f3d-8082-e862c8e9e1e2", + "metadata": { + "editable": true, + "slideshow": { + "slide_type": "" + }, + "tags": [ + "hide-input" + ] + }, + "outputs": [ + { + "name": "stdout", + "output_type": "stream", + "text": [ + "Pearson Correlation Arnoldi vs direct 0.9900823761476388\n", + "Spearman Correlation Arnoldi vs direct 0.9823966546590421\n" + ] + } + ], + "source": [ + "print(\n", + " f\"Pearson Correlation Arnoldi vs direct\",\n", + " pearsonr(mean_arnoldi_train_influences, mean_train_influences).statistic,\n", + ")\n", + "print(\n", + " f\"Spearman Correlation Arnoldi vs direct\",\n", + " spearmanr(mean_arnoldi_train_influences, mean_train_influences).statistic,\n", + ")" + ] + }, { "cell_type": "markdown", - "id": "97e83f8f", + "id": "a4017f6afd3ebf93", + "metadata": { + "editable": true, + "slideshow": { + "slide_type": "" + }, + "tags": [] + }, + "source": [ + "### Nyström" + ] + }, + { + "cell_type": "markdown", + "id": "a1c962f5fc8ae934", "metadata": {}, "source": [ - "### EK-FAC" + "Similar to the Arnoldi method. the Nyström method uses a low-rank approximation, which is computed from random projections of the Hessian matrix. In general the approximation is expected to be worse then the Arnoldi approximation, but is cheaper to compute." 
+ ] + }, + { + "cell_type": "code", + "execution_count": 32, + "id": "f68a046f672bbfc1", + "metadata": { + "editable": true, + "slideshow": { + "slide_type": "" + }, + "tags": [ + "hide-output" + ] + }, + "outputs": [ + { + "name": "stderr", + "output_type": "stream", + "text": [ + "Encountered error in cholesky decomposition: linalg.cholesky: The factorization could not be completed because the input is not positive-definite (the leading minor of order 19 is not positive-definite)..\n", + " Increasing shift by smallest eigenvalue and re-compute\n" + ] + } + ], + "source": [ + "nystroem_influence_model = NystroemSketchInfluence(\n", + " nn_model,\n", + " F.cross_entropy,\n", + " rank=30,\n", + " hessian_regularization=0.1,\n", + ")\n", + "nystroem_influence_model = nystroem_influence_model.fit(training_data_loader)\n", + "nystroem_train_influences = nystroem_influence_model.influences(\n", + " *test_data, *training_data, mode=\"up\"\n", + ")\n", + "mean_nystroem_train_influences = np.mean(nystroem_train_influences.numpy(), axis=0)" + ] + }, + { + "cell_type": "code", + "execution_count": 33, + "id": "a1bd4c9f39629e5a", + "metadata": { + "editable": true, + "slideshow": { + "slide_type": "" + }, + "tags": [ + "hide-input" + ] + }, + "outputs": [ + { + "name": "stdout", + "output_type": "stream", + "text": [ + "Percentage error of Nyström over direct method:36.14298701286316 %\n" + ] + } + ], + "source": [ + "print(\n", + " f\"Percentage error of Nyström over direct method:{np.mean(np.abs(mean_nystroem_train_influences - mean_train_influences)/np.abs(mean_train_influences))*100} %\"\n", + ")" + ] + }, + { + "cell_type": "code", + "execution_count": 34, + "id": "d414f021ba9ca35e", + "metadata": { + "editable": true, + "slideshow": { + "slide_type": "" + }, + "tags": [ + "hide-input" + ] + }, + "outputs": [ + { + "data": { + "image/png": 
"iVBORw0KGgoAAAANSUhEUgAABTsAAALGCAYAAACK4UUfAAAAOXRFWHRTb2Z0d2FyZQBNYXRwbG90bGliIHZlcnNpb24zLjcuNCwgaHR0cHM6Ly9tYXRwbG90bGliLm9yZy8WgzjOAAAACXBIWXMAAA9hAAAPYQGoP6dpAAB300lEQVR4nOzdd5xlZX0/8M93d+mwS1EBQV3UKGrsXUMEeyRKVGJLEaMGSwxEf0Y0ltjixk5irLFgIjbEFmzRgGKJvcaurAJSBXZFmuw+vz/OGbgMM7szd+60u+/363Vfs/ec55zznZl75u585inVWgsAAAAAwHK3YrELAAAAAAAYBWEnAAAAADAWhJ0AAAAAwFgQdgIAAAAAY0HYCQAAAACMBWEnAAAAADAWhJ0AAAAAwFgQdgIAAAAAY0HYCQAAAACMBWEnAGOhqlZX1b9U1fqqurKqWlXdrqoO7v/9j4td47ipqvtX1Rer6qL+a/yhxa5psqo6paraCM7zj/3nePDcq1o+qmpt/3m/Y7FrgcU23f1QVe/ot69dnMq2HaP6mT6H62+T7wUAy42wE4BF0//CMKpfWl6e5GlJvpvkZUlemOTsEZ2bSfpf6j+c5IAkb0v39X7PVo45ov+eHzHvBbLk9H+IWL/YdUw2EF60qnrKNG0mXrsvWaCa3CvMyVK937ZGcAzAKKxa7AIAYET+OMmPW2sPHtxYVQcuUj3j7r5JdkzyjNba8YtdzBb8ZZKdR3Ce16ULc385gnMtJ2cmuUWSDYtdyAJ5QVX9R2vtN4tdCMvKs5OsS3e/AACLTM9OAMbF9ZOctdhFbEOu33/81aJWsRWttV+21n44gvOc31r7YWvtklHUtVy01n7Xf97bwr310yTXS3LMYhfC8tJaO6u/T3632LUAAMJOAJaYwTnR+n+/p6rOr6rLquprVfXHk9pPzN9VSe41MBz1lK1cZ9ohfluak6uqDuxrO72qrqiqc6rq+Kq6+RRtrxqOV1VHVtV3+8/jnKp6c1Wtmeb6+/fzj/6kqi6tqguq6itV9bxp2r6uqn5eVZdX1a+r6iNVdectff7TXPcRVfW5qtrQX/e7VfXsqtphoM3B/df7hf2mkwe+5tf6eg0cd0qSt/dP3z5wzFXDFQe/7lX1mKr6clVdPPh96of3fqD/fC+tqo1V9YWq+vPprjt5qoQamMe1unldT6pu3tFLquqzVXWPKc4z5Wti4rVWVdfpv6dn9d+H/6uqx01T0w79+Sa+Z6dV1Uv67Vt97Q6cZ/BeObCqPtS/Vn5bVZ+vqvtv4frH9N/fS/qv4alV9YgtXWPS9hm/tgdeMzdKcqNJ3/t3DLQ7qKo+WlVn9F+Xs6vqf6vqBTP5eozAv6YL7/+uqvbfWuOqenf/Odxrmv0P7/e/bmDbjfuvz08H7u3vVtUbq2qvvs0pGc29sm9V/Vt1P+uuqKrzqurEqrrjFj6nR1fVyf39cFlV/aCqnlsDPwMG2k689veuqrf13/vfVjeP70F9m12q6hVV9YuB++JPt/a17Y/dr6o2VdU3t9Dm430dvz+w7SFV9ZmBe/FX1d3XU05RMM15d6uqV/evxcuq6odV9fRM87tTTTH0etL9ebOqem9VnVtVm2vg50hVPaCqPlbd+9zlVfWz/mu2+zTX2uL7w0zvty187hOfywFV9TdV9f3+a7C+qp5TVdW3+9P+ur/tP6/XVdVO05xzRu+bfd2P7Z+eNlD3+inOuaqv5yf91+30qvrnqtp+mhruU1Wf6L9el1fVj6tqXU3/PnzHvv1vqvsZ+emquvvWvn4ALA2GsQOwVN0oyVeS/DzJfyTZM8kjk3y4qu7bWju5b/eOJKckeUGSX/TPk2T9qAuqqgcmOTHJdkk+mq4n2P5JHpbk0Ko6pLX2jSkOfXmSB/THfCrJIUmemOSmSe496Rp3SvLJdJ/v5/rr7Zzklkn+McmLB9reoT/fnv0xJya5TpI/SfL5qnpoa+1jM/zc/indUMzzkxy
f5OIkf5Tkn5I8oKru31q7It3X9YVJDk5yryTH5eqv9fpM7x1JLkpyWLq5Pr81sO+iSW2fkeR+6b5eJycZ/GX0DUn+L93X5qwkeyV5UJL/qKqbt9auFQhvwZ2S/H2SLyX59yQ3TPLwJJ+pqtu11n40w/PsnuQLSa5IckKSHZL8aZK3VdXm1tpxEw37oOADSQ5N8pN0w+O3S3JEklvNovZBB/Sfw3eTvCnJvunulY9X1WNaa+8duP726V4r90rywyT/lu71dXiS9/af93Nmce2ZvLbXp3vNHN0/f+3A8d/q63pgkpOSbEzykXTDgfdMN4T+Kbk6XJ9PlyR5XpK3Jnlprg5dpvOGJI9K8tdJPjvF/iP7j29MuvAxyVeTrE7ysXSvgx3Tff/+It1r4dcZwb1SVQck+Xy6Htj/k+TdSW6Q7nV5aFU9vLX2X4Mnqqq3JXlckjP62i5Kcrd0P3PuU1X3a61dOen6u6d77f+mv8ae/dfkk30w9KZ+23+le50/Ot3r7PTW2v9O8TW7SmvtzKr6dJL7V9WtW2vfnVTvvv3n/vXW2vf6bX/dX/Ps/mtyfrreurfpP7fXb+ma/Tl2SPKZJHdO8u0k7+o/z+elu29m6yZJvpzkx/25dkr3Ok91Qf4/Jrkg3dfo3L7W/5fkQVV199baxoHaZvL+sD5bud9m6JXpfs5P3NsPSXdfbF9VF6Qbtv+hJKem+z48NcnKJE8ePMks3zdfmO7967ZJjs3Vr/eLcm3HJzkoycfTfT0flO7n+fXSfa8Hazgy3f362yTvT/d1PjjJs5I8uKru2Vq7aKD9PZJ8Osn2fe0/TXK7dP/X+J9pvl4ALCWtNQ8PDw8Pj0V5JGndW9E1tq2d2J7kBZP2PaDf/rFpznXKFNsP7vf946Tt65Osn6auf+yPOXhg2x5JLkz3y/MtJ7X//XTh4DcmbX9Hf55fJrnhwPZV6X5RbUnuMrB9+ySn9dsfM0Vd+086x0+TXJbkXpPaXT9dWHRWkh1m8H24+0Cd+0y6xkf7fc/Z2tdoBtc5oj/miK183X+b5PbTtLnJFNu2TxdO/C7JfpP2nTLFa2ziNXGtWtIFVC3J62fy+Q6c59+TrBzYfsskVyb5/qT2f9G3/1yS7Qe2754ufJzydTzN12LtwPVfMWnfnfqvx4VJVg9sf3bf/mNJVg1sv166e6IluccU13jHXF7bM7jnPtAfc9sp9l1npq+xYR4D39snpOu5950km5LcborX7ksmHfu9dPfgXpO23zjJ5iRfGNj2tP4cR01Rwy5JdhrVvZIuEGtJ/mHS9nv0r8tfJ9l1iuudOFjHpGsdNWn7xGvvjUlWTPEavyDdz48dB/Yd1O/74Ay/N4/u279yin3P7Pc9bWDb15NcnuR6w76OkjynP+8HJn1eB/Sf05buh7XT3J//NMV1Dun3fTHJ7pP2TXw/XjOwbcbvD1u737by+U98Lusz8PM03c+o8/vX3HlJbjGwb4ck35/8tc/c3jfXTlPfKf3+ryfZc9I99NN09+7g+9iN+ro2Jjlw0rle35/rzQPbKlf/LD5sUvujBr6nB8/2a+vh4eHhsXAPw9gBWKp+keQaKx+31j6ZLly5yyLU85fpftl7QWvt+5Pq+l6StyS5fVXdcopjX9Ra++VA+ytz9TDVwc/lwel+Qf5Im2LRn9baGQNPD03XY+hfW2ufndTuV+l63O2T5D4z+Nz+qv/4ktbaVSvY93U+I11o84QZnGdU3txa++ZUO1prP5ti2xXpeiiuysw+3wlfaK29Y9K2t6ULg2bzGrskydNba5sGavp+uh5vt6iqXQfaPrb/+Ny+7on2F2Wg1+4sbUjyosENrbWv5eoeaQ8d2PVX6X5Rf3ob6KXXWjt34Pqz+V7P9LU9U5dO3tBaO3+I8wyltbY5XYi2IskrZnDIG9IFPUdM2v7EdKHJm6Y4ZqrP8bettWttn4Fr3SvVDcG/f7qflS+fdJ0v5uoemA8b2HVUutf9X01Rx4v
ThaN/NsX1L0nyzP7rNuH4/lx7pAtILxu4/qnpQrTbzezTy4fSvb7/rKpWTtr32HSB/rsnbb+y334Ns3gdPS7dz7y/H/y8WmunJfmXGZ5j0DmZumfy3/Yfn9gGehX213pHul6Yg1/z2bw/jMKLW2tXLbjU1/iRdD1J39Ba+8HAvsuTvDddIHuLgXPM5X1za57VWrtg4Hy/Tfczb0W6P/ZM+PO+rte1a8/f/A/peiX/RV09VcM9ktw8yedaax+e1P51Sa71HgTA0mMYOwBL1bcGw6MBp6fribjQJq5526r6xyn236z/eIt0PVwGfW2K9qf3H/cY2Ha3/uPHZ1HPjaap5/cG6tnaUPY79B+vNTyvtfbjqjojyQFVtaa1thCrcn9luh1VdcN0Qw/vk27Y+eQ54vabxXWu9X1prf2uqs7JNb8vW/OTNjDUdMDg9/ji/t+3TxekfHGK9p+fxTUHfaNNvXr4KekCodsnOa6qdks3vPzMKX7pT67+/t9+Ftee6Wt7a96VLnz7clW9N92Q7C/MNMDp5zc8eopdr50cJG1Na+2TVfWpdMOnH9S2PBXEO9MN5/3rJK/qa5mYluDCJO8baPuRdNNC/FtVPSBd78svpOv922ZT44Cp7pWJ79+pbeoFc/4nXQB0+yTvrKqd0w0bPj/J0f2UjJNdnmuGWBN+PPm111rb1N9Du7TWfj7FMWcmuetUF5mstXZpVb0vXXj8gPQ/y6qbd/RW6XqIDoaY70r3ffh+Vb0n3fQCX2itnTeT6w3cI6dP9YeVXD1lymx8uw8DJ7t7ulD2T2vqeUy3T3LdqtqrtfbrzO79YRSmurcnFqT7+hT7JoLRwflu5/K+OUx9U/3s2dL724XVzQn7h0kOTDdtwUT7a01N0b+2P5/uD40ALGHCTgCWqoum2X5lFmeBvb36j0/cSrtdp9h20RTbJnrVDfZW2r3/eGa2bqKerS32MVU9k63pP0634vZZ6YLF3dP1sppvZ0+1sapunC7c2SPdPHGf6uvZlK7H02PT9bKbqYum2X5lrvl9mct5Mulca5Jc0K4992HS9QAbxnTHTXwd10z6uKXvc3L163AmLppi21Sf9xa11k6sbvGxZ6TrfXpkklTV15M8u7X231s5xe6ZOoR6xzQ1bs0zk9w3ycur6pPTNWqt/aaq/jPJk/q5B09ON7fhPumC1sFejb+oqrukGxb+wFzds/L0qnpla22YXoNT3Suz/T7vka4X6nUz+yBvup8HV25l32x+B3lHup+7j83Vf7iZ6CF93GDD1tqrq+r8dPO8/m26ALxV1WfT9UCdKiAbNPG129o9NRvTHbNXuq/D1r7mu6brWbt7/3wm7w+jMNX378oZ7NtuYNtc3je3aJo/Ykz3MzeZ+f0wH68BABaYsBOAbdXmdD1nprL7FNsmfrm7bWvtO/NS0dWhzEx6J07Uc1hr7SNzvO7EufbJ1EP09p3Ubr5N18vt6el+eX7c5OHnVfXobH1BmaVgY5I9q2rVFIHn3kOec7rj9uk/bpj0cZ8p2iYL/32+htbaSUlOqqpd0vX8++N0i538V1XdfvIw2EnHrk8X2I2qlu9U1XHphjT/VaYYFj3gDUmelC6gPTlXL0z05inO+4Mkj6yqVel6U9433Vyex1bVb1trb51tqVNsm+33eeLjN1trd5ii/aJqrX2xqn6S5CF9D97fppvL8/xM0Wu9tfbOdD1Wd083JPmh6b6Hn6yqA7fSy3Pia7G1e2pWn8IWrrWitbbnDM9zUf9xNr3XF9tCvG/OtIZ90i1uN9l098MoXwMALDBzdgKwrbowyd79kNPJ7jTFtomVgw+av5KuusYfzaLtKOqZmPPv4Mk7quqm6YYlnjbb4cBTmJiWYDa9JgfdtP/4gSn23WvIcy60b6b7/9c9ptj3B0Oe8w798NvJDh64Zvrhxj9Lsl9V/d4U7Q/pP35jin2jsCkz+N7381f+T2vt6emGfW+fmd0To/bcdHNSvijd4idT6kO
cLyR5aFXdNV2A+bnBOQ2nOObK1trXW2v/nC64S7pVqCfM5V6ZuJ//oA9VJ7vG97m1dnG6EOhWVTXT4G2hHZdu5fpHppuv+DpJjp9mmH6Srudfa+1jrbUnpusdume64crT6u+Rn6a7R6YaqnzwUNVP7X+T7FFVt5pF+2Tm98KM7rd5Nsz71FzfJybb0vvb7unmj70sycT9OvHz71rvKf28scP+nAZgAQk7AdhWfSXdCIfHDW6sqiOS3HOK9m9P17PmBf1Q1GuoqhVVdfAca/pousU7HtL3VJx8jcG50D6cLrh6alU9aKqTVdXd+/n4tuZt/cfnVtV1B45fmeSV6f6/MNseZ1P5df/xhkMev77/ePDgxn7+w4VcQGku3tl/fElVXdWzuKrWJHnekOdck+T5gxuq6k7pFjfZkOSDA7velq4H5CsGF3ypqusMXP9tmR+/TjcH4eR5VlNVfzhNMDfRu+qSeappWv1CX69K15Pr6K00f0O6UPYD6b6+b5zcoKru2H+fJ5vqcxz6XunnOf3vdFM7HD2phrsmeUy6P/YMvi5e3df/tj4Amlz7HlW1mL0+35muN/5f9o+kCzCvoaoOqaknHb1e/3Emr6O3p/uZ989VddXvSlV1QK5eVGgUXtN/fEtVXX/yzqraparuNrBpNu8PyRbutwU0zPvmXN8nJvvPdD2zn9b/8W7Qi5OsTvKfA/OqfjHJj5L8YVUdNqn938R8nQDLgmHsAGyr/jVd0PmGqrpPuoUNbpduQYX/SjeE9iqttV9X1eHpAoL/rarPpOsN1ZLcoD9ur3S9j4bSWruiX6jiU0mOr6oj0/WM2THdAg73Sf/e3S+k87B0i5ycVFVfTLd67yV9PXdOcuN0Q/S2+At+P0z05Un+Psn3quqEdENF/yjJ76dbOGcmK1NvzZf6Wo6uqr1y9dxn/zrDhY9en+579v6+xl/19T0w3UIwjxxBjfPtnUkela7m71XVR9LNcffwJF9Ntwrw5ukPn9LnkjyhD7K+kO57/sh0gc2RkxZPemW67+thSb5dVR9Lt7ryn6YLhF7eWht2oaSt+Uy61+Unqupz6Ra9+XZr7aPpVrner6q+kC7QuSLJHZPcO8kvkrxnnmrampenW3xockgy2fvThVf7pRtefeIUbf4iyZH9Aic/Sxc43iTdKtuXJ3ntQNu53itPSvdaeEVV3T/dYi43SPd93pxuKoirFhZqrb2tX/TnKUl+1s9T+st0vSEPSNcj8u39eRdca+30qjo53c/AK5N8d/Iq9L0PJrm4qv433euo0vUqvHO6RXU+PYPLvSpdL9uHJ/lG/7XYPckj0t1rD5nL5zKhtfaZqjomycuS/KS/F09LN3/ljdL1LPx8up8Vs3p/6G3pflsQQ75vfibdnLlvqaoPpFst/aLW2uuGrGF9VR2d5N/SfT/fl+S8dF/fuyf5YbpF7ybat6p6fLo/GHygqk5M19v3dum+xp9I/z0BYOkSdgKwTWqtfb+q7ptumOyD0/0CfWq6X34elklhZ3/MZ6rqNkn+X7qVgQ9KF8r8Kt1Kr1MNr55tXV+rqtslOSZdKHWPdL/s/TSTeu/18wreNt1cln+cLgjcnG7BhW+mW/hicKXiLV33Wf2qtH+TrufUdukCmecmeVVr7YoRfG4XVtXD+7qOyNVDg/8zM5gnsv98D0nyknRDWVelWz33Yel6Dy35sLP/RfqhSZ6TLvx6Wrrv13Hpwtw/STev52ycli6EWtd/3CHdUMwXtdausbhOH5jcL91r5jH99a9M93U8urX27uE+sxl5SbrQ6MHpek+vTPd5fzTdffjQdFNI3Dfd6/iX/fbXttYunMe6ptVau7iqXpApempOandFVb0rXU/Kd0yz+va7031v7pEuyN0p3WIz70l3j31v4HxzvVd+3vfufW6SB6XrDb0xXVDz0tbaV6c45qlV9fF0r6H7pvteXZDu+/CK/tqL6R25OtA7bpo2x6T72XyHdJ/3ZenC8mclecO
Whr1PaK1d3r83/GO6nylHpQtOX5IutBtJ2Nlf65/7gP9v0w2PPizd9/fMdHO+Hj+p/YzfH7Ll+23BzPZ9s7X2yap6RrpFjY5O1+P4F0mGCjv7c76+qn7a1/DwdH/gOT3d6/qfJk/R0lr7QlUdlOSluXragC+nu48eEGEnwJJXrU03ZzYAAAulDyE/lWRda+3ZM2i/Nl3QeVxr7Yj5rY6tqapT0vWAvHlr7SeLXA4AwDbLnJ0AAAtomvn59krXMzO55lyKLAP9fIT3SvJJQScAwOIyjB0AYGG9up9+4Ivp5o7bP91QyT2TvKm19pXFLI6Zq6onp5unc2IKiRcsbkUAAAg7AQAW1onpVuB+cLo59S5Lt2jHWzOaVe9ZOM9KF1b/PMlfCKoBABafOTsBAAAAgLFgzk4AAAAAYCwIOwEAAACAsWDOzlmqqkpy/SS/WexaAAAAAGCZ2i3Jr9qI59gUds7e9ZOcsdhFAAAAAMAyt3+SM0d5QmHn7E306Nw/encCAAAAwGztlq4z4cizNWHn8H7TWtu42EUAAAAAwHLSzRI5PyxQBAAAAACMBWEnAAAAADAWhJ0AAAAAwFgQdgIAAAAAY0HYCQAAAACMhWUddlbVH1bVR6vqV1XVqupPZnDMwVX1jaq6vKp+WlVHzH+lAAAAAMB8W9ZhZ5Jdknw7yVNn0riqDkhyUpKTk9wuyWuT/HtVPWCe6gMAAAAAFsiqxS5gLlprH0/y8SSpqpkc8qQkp7XWntE//0FV/UGSv0vyyXkpEgAAAABYEMu9Z+ds3T3Jpydt+2S/fUpVtUNVrZ54JNltPgsEAAAAAIazrYWd+yQ5Z9K2c5Ksrqqdpjnm2Uk2DDzOmL/yAAAAAIBhbWth5zBelmTNwGP/xS0HAAAAAJjKsp6zcwhnJ9l70ra9k2xsrV061QGttcuTXD7xfIZzgwIAAAAAC2xb69n5pST3mbTtfv12AAAAAGAZW9ZhZ1XtWlW3q6rb9ZsO6J/fsN//sqp658Ahb0xy46p6eVUdWFVPSfKIJK9Z2MoBAAAAgFFb1mFnkjsl+Wb/SJJX9/9+Uf983yQ3nGjcWjstyaHpenN+O8kzkjyhtfbJhSoYAAAAAJgf1Vpb7BqWlapanW5V9jWttY2LXQ8AAAAALCfzma8t956dAAAAAABJhJ0AAAAAwJgQdgIAAAAAY0HYCQAAAACMhVWLXQAAAAAA0Fl7zEkrkxyUZN8kZyU5df26QzctblXLh56dAAAAALAErD3mpIclWZ/k5CTH9x/X99uZAWEnAAAAACyyPtA8Icl+k3btl+QEgefMCDsBAAAAYBH1Q9eP7Z/WpN0Tz1/bt2MLzNkJAAAAAHM0x7k2D0qy/xb2V5Ib9O1OmUOZY0/PTgAAAACYgxHMtbnviNtts4SdAAAAADCkEc21edYMLzfTdtssYScAAAAADGGEc22emuSMJG2a/S3J6X07tkDYCQAAAADDmZhrc3LQOWFwrs1p9XN7HtU/nRx4Tjw/ehZzgG6zhJ0AAAAAMJyRzbW5ft2hJyY5PMmZk3adkeTwfj9bIewEAAAAgOGMdK7NPtBcm+SQJI/pPx4g6Jy5VYtdAAAAAAAsUxNzbe6XqYeyt37/jOfa7IeqnzKK4rZFenYCAAAAwBDMtbn0CDsBAAAAYEjm2lxaqrXpVrRnKlW1OsmGJGtaaxsXux4AAAAAFt/aY05amW7V9X3TzdF5qh6dU5vPfE3YOUvCTgAAAAAY3nzma4axAwAAAABjQdgJAAAAAIwFYScAAAAAMBaEnQAAAADAWBB2AgAAAABjQdgJAAAAAIwFYScAAAAAMBaEnQAAAADAWBB2AgAAAABjQdgJAAAAAIwFYScAAAAAMBaEnQAAAADAWBB2AgAAAABjQdgJAAAAAIwFYScAAAAAMBaEnQAAAADAWBB2AgAAAABjQdgJAAAAAIwFYScAAAAAMBaEnQAAAADAWBB2AgAAAABjQdgJAAAAAIwFYScAAAA
AMBaEnQAAAADAWBB2AgAAAABjQdgJAAAAAIwFYScAAAAAMBaEnQAAAADAWBB2AgAAAABjQdgJAAAAAIyFVYtdAAAAAAALZ+0xJ61MclCSfZOcleTU9esO3bS4VcFo6NkJAAAAsI1Ye8xJD0uyPsnJSY7vP67vt8OyJ+wEAAAA2Ab0geYJSfabtGu/JCcIPBkHwk4AAACAMdcPXT+2f1qTdk88f23fDpYtYScAAADA+Dsoyf65dtA5oZLcoG8Hy5awEwAAAGD87TvidrAkCTsBAAAAxt9ZI24HS5KwEwAAAGD8nZrkjCRtmv0tyel9O1i2hJ0AAAAAY279ukM3JTmqfzo58Jx4fnTfDpYtYScAAADANmD9ukNPTHJ4kjMn7TojyeH9fljWqrXpei8zlapanWRDkjWttY2LXQ8AAADAbKw95qSV6VZd3zfdHJ2n6tHJQprPfE3YOUvCTgAAAAAY3nzma4axAwAAAABjQdgJAAAAAIwFYScAAAAAMBaEnQAAAADAWBB2AgAAAABjQdgJAAAAAIwFYScAAAAAMBaEnQAAAADAWBB2AgAAAABjQdgJAAAAAIwFYScAAAAAMBaEnQAAAADAWBB2AgAAAABjQdgJAAAAAIwFYScAAAAAMBaEnQAAAADAWBB2AgAAAABjQdgJAAAAAIwFYScAAAAAMBaEnQAAAADAWBB2AgAAAABjQdgJAAAAAIwFYScAAAAAMBaEnQAAAADAWBB2AgAAAABjQdgJAAAAAIwFYScAAAAAMBaEnQAAAADAWBB2AgAAAABjQdgJAAAAAIwFYScAAAAAMBaWfdhZVU+tqvVVdVlVfbmq7rKFtkdUVZv0uGwh6wUAAAAA5seyDjur6pFJXp3khUnukOTbST5ZVdfbwmEbk+w78LjRfNcJAAAAAMy/ZR12Jnl6kre01t7eWvt+kicluSTJX23hmNZaO3vgcc6WLlBVO1TV6olHkt1GVz4AAAAAMCrLNuysqu2T3DHJpye2tdY298/vvoVDd62qX1TV6VX14aq61VYu9ewkGwYeZ8ytcgAAAABgPizbsDPJdZKsTDK5Z+Y5SfaZ5pgfpev1eViSP0/3+X+xqvbfwnVelmTNwGNLbQEAAACARbJqsQtYSK21LyX50sTzqvpikh8kOTLJ86Y55vIklw8cM89VAgAAAADDWM49O89PsinJ3pO2753k7JmcoLX2uyTfTHLT0ZYGAAAAACy0ZRt2ttauSPL1JPeZ2FZVK/rnX5ruuEFVtTLJrZOcNR81AgAAAAALZ7kPY391kuOq6mtJvpLk6CS7JHl7klTVO5Oc2Vp7dv/8+Un+N8lPk+ye5JlJbpTk3xe6cAAAAABgtJZ12Nlae29VXTfJi9ItSvStJA9srU0sWnTDJJsHDtkjyVv6them6xl6j9ba9xesaAAAAABgXlRrbbFrWFaqanWSDUnWtNY2LnY9AAAAALCczGe+tmzn7AQAAAAAGCTsBAAAAADGgrATAAAAABgLwk4AAAAAYCwIOwEAAACAsSDsBAAAAADGwqrFLgAAAABYPGuPOWllkoOS7JvkrCSnrl936Kat7QNYiqq1ttg1LCtVtTrJhiRrWmsbF7seAAAAGNbaY056WJJjk+w/sPmMJEf1/55y3/p1h564MBUC42g+8zVh5ywJOwEAABgHfdB5Qv+0Bna1gW1tmn2HCzyBYQk7lxBhJwAAAMtdPzx9fZL9cs0wcyZauh6eB/TPDXMHZmU+8zULFAEAAMC256B0w9NnG3SmP+YGSZ6TLjA9Ocnx/cf1fY9RgEUh7AQAAIBtz74jOMeL0vUMHbRfkhMEnsBiEXYCAADAtuesEZ1ncs/Qieev7YfKAywoYScAAABse05NN+/mMAt5bO2YiWHuBw1xboA5EXYCAADANqZfROio/unk8LINbJtq30yNYqg8wKwIOwEAAGAbtH7doScmOTzJmZN2ndFvf/g0+14ww0uMaqg8wIxVa8P0WN92VdXqJBu
SrGmtbVzsegAAAGAu+rk1D0rXE/OsJKf2PT+n3Ncftj7dYkRTrebe0oWiB0ycB2DQfOZrws5ZEnYCAACwretXWz+hfzoYeE6EDIf3PUcBrmU+8zXD2AEAAIBZ2doQeEEnsFj07JwlPTsBAACgs6Uh8ADTMYx9CRF2AgAAAMDwDGMHAAAAANgKYScAAAAAMBZWLXYBAAAAwPwxryawLdGzEwAAAMbU2mNOeliS9UlOTnJ8/3F9vx1g7Ag7AQAAYAz1geYJSfabtGu/JCcIPIFxJOwEAACAMdMPXT+2f1qTdk88f23fDmBsmLMTAAAAlpgRzLN5UJL9t7C/ktygb3fKkGUCLDl6dgIAAMASMqJ5NvcdcTuAZUHYCQAAAEvEVubZ/MDaY0563gyHnp81w0vOtB3AsiDsBAAAgCVghvNsvigz6+V5apIzkrRp9rckp/ftAMZGtTbdzz2mUlWrk2xIsqa1tnGx6wEAAGDhzGUuza0du/aYkw5ON2R9ayZ+kT98/bpDT9zC9SZ6iSbXDE9ndDzAfJnPfE3YOUvCTgAAgG3LQEj5kCR/nuS6A7vPSHLU1kLDPng8NtdcNOgax6495qRHp5ujcyZaf/wBWwpbp7nu6UmOFnQCi2U+8zXD2AEAAGAakxYL+rtcM+hMurk0T9jSsPKtzMM5eOxs5s8cXE19Wn2guTbJIUke0388QNAJjCs9O2dJz04AAIBtwxaGgU82bS/Lvlfo+nTB5lTnuOrY/vmW2k7lMevXHfruGbYFWBL07AQAAIAFtJXFgibbUi/Lg9INIZ/uHFcd2welR/XbZ9ozyWrqAANWLXYBAAAAsJAmLRR0Tr9571xz0aCJkHI29p3htmmPXb/u0BPXHnPS4bn2PJuTTfQItZo6wAA9OwEAANhmTJqD8/gkn+kfx/fb1vdtZhpSDpqql+VMe15e1W5gns3npws1J/fynHh+9ExXggfYVgg7AQAA2CZsYaGgQfv1bW46i1O3dCucT9XL8tR0PTCnG5Y+5bHr1x26af26Q1+c5PAkZ0465owkh1tkCODaLFA0SxYoAgAAWH7WHnPSdulCw8mrqU9lYoh4ZeuLBU38Uj1t+LiFhY62emx//OCw+8Gh9gDL0nzma8LOWRJ2AgAALC992PjGzCzoHPT8JC/s/z1d4Hl6uuHkW+xl2dcweR7OGR0LMG6EnUuIsBMAAGD56Bf7eV//dGurqk/2mCSX59oh5blJ3pXkI5lFL0s9NAE6ws4lRNgJAACwPKw95qRHJHl3hl+v4pD16w49RUgJMFrzma+tGuXJAAAAYClYe8xJ/5zk74c8fGLOzlOTbrGgJKeMpjIA5pPV2AEAABgr/dD1YYPOpBvu/m69NwGWH2EnAAAAY6Mfcv76OZ6mJXl0fy4AlhHD2AEAAFg2ZjB/5kGZ/arrk1WSG/TnOmWO5wJgAenZCQAAwLKw9piTHpZkfZKTkxzff1zfb5+w7wgvOcpzAbAAhJ0AAAAseX2geUKS/Sbt2i/JCQOB51kjvOwozwXAAqjW2mLXsKxU1eokG5Ksaa1tXOx6AAAAxtGk4ernJDkuXbBZUzSfWD39gP75+i20nbC537/F81mkCGD05jNfM2cnAAAA824Gc20Otn1YkmOT7D/D0181x+b6dYeesvaYk45K1wu0ZfrAcyLonNxmokfQ0YJOgOXHMHYAAADm1Qzn2hxsO9Vw9ZnYN0nWrzv0xCSHJzlzC20ngs7Nk7afkeTw/hwALDPCTgAAAObNLObanOj9eWz/dEtD0KdzzsQ/+rDysVtpX0lWJvm7JI9Jcki6oeuCToBlyjB2AAAA5sVWwsuJnpWvXXvMSR/uh4wflJkPXZ+JvWfY7pz16w599wivC8Ai0bMTAACA+TIRXm5p3swb9O2Sfhj6HEwON2e6mrpV1wHGhLATAACAket7dd57hs0nQs65ho6Tjz813RycbYq26bef3rcDYAwIOwEAABipgQWJnjfDQyZCyq2Fk9O
ZMrTsh8YfNdBm8jGJVdcBxoqwEwAAgJGZ5Wrq1wgpZxhOziq03MLK7FZdBxhD1dps/2C2bauq1Uk2JFnTWtu42PUAAAAsBf2w9XsleX+SPTKz1dRbpggc+8D02FxzsaLTk7w73arpk7cfvbXQsq/voHRD5s9KcqoenQCLYz7zNWHnLAk7AQAArmmacHKmHj5VUDldOCm0BFj+hJ1LiLATAADgagPD1pOZ9eYc1NINJz9AYAmw7ZjPfM2cnQAAAAyl72V5bP90tkHnxDE3SNdTEwDmbNViFwAAAMCydVCGG7o+2b4jOAcA6NkJAADA0K4/ovOcNaLzALCN07MTAACAa5jJIkD9XJ2vneOlJubsPHWO5wGAJBYomjULFAEAAONsmpXVNyb5QpKf9Y8zk7w3w83TOWHil9HDp1qNHYDxNZ/5mp6dAAAAJLnGyuqTQ8zVSf5oxJc7I8nRgk4ARknYCQAAwODK6nPprbk15yZ5V5KPZIqh8QAwV8JOAAAAktGtrD7ZhiRPSfKrCDgBmGfCTgAAgG3ADBYdesg8XXp1ksvWrzv0lHk6PwBcxQJFs2SBIgAAYCmbKtRM8tAkr09y3YGmZyQ5av26Q0/sjzlr0v5RmVhx/QC9OgFILFAEAADAJNOEmofl2iup/ybJblOcYr8kJ6w95qTDk1yQ+Qk6k24O0Bv0tZ4yT9cAgCTCTgAAgGWnXzV9cqi5Md2Q8cmmCjqTqxci+vck7x5dddPadwGuAcA2TtgJAACwjPRB5wlT7Joq6JyJPdItIDSMV6RbeOg1M2h71pDXAIAZW7HYBQAAADAz/dD1Y/untaW282xzkpevX3fo3yf513Rzck63IERLcnq6YfYAMK+EnQAAAMvHvdINXV+soPPiJG9PsuP6dYc+K0n6RYeO6vdPDjwnnh9tcSIAFoLV2GfJauwAAMBC63t0Pqd/7LjAl78wyTuSfCTJqdOFltPMI3p6uqDzxPkuEoDlYz7zNWHnLAk7AQCAhdSvlv7WDD8n51z92fp1hx4/k4ZTrRCvRycAk81nvmaBIgAAgCViirDwj5M8Y1GL6hYgmpE+2Dxl/koBgC2bU9hZVTskuUOS6yX5Qmvt/JFUBQAAsI2ZZhj4YmrpFh6ysBAAy8bQCxRV1d+m+0vj55OcmOQ2/fbrVNX5VfVXoykRAABgvPVB5wlJ9lvsWnoWFgJgWRqqZ2dVPS7Ja5O8J8mnkrxtYl9r7fyq+p8kjxrcDgAAwDX1w9bvleQtWbwV1qdyRiwsBMAyNOww9mck+XBr7TFVtdcU+7+e5G+HLwsAAGC8LZFh6y9Nsnu6oLUl+VKSM2NhIQCWqWHDzpsm+Zct7L8gyVQhKAAAwDZn0sJD5yT5gyT/OOLLtMy8d2hL8uskj801w9bDkhwl6ARguRp2zs6LklxnC/tvmeTsIc8NAAAwNvoenOuTnJzk+CSfSfLCdMHkKIeuXzjp+fnpQs02afvE8+vk2nOE7pfkhL5mAFh2qrXJ73szOKjqbUnuneR2SVYmOS/JfVtr/1NVt0ry5SRva62N3VD2qlqdZEOSNa21jYtdDwAAsHgm9dg8K5OGfw8sPJTM/5yc90myebCWdD01Jw+VPz3Jzkn2nKamiVXYD9DDE4D5MJ/52rBh5/XTBZqV5KNJ/jrJf6YLPh+e7o31Lq2180dX6tIg7AQAAJJp59w8I90w8BP7IHR9ut6S8xl0bjGcnCKQXZGud+nWHLJ+3aGnjLBOAEiyBMPOvqjrJfmnJA9LN6F1kvwmyQeSHNNaO3cUBS41wk4AAGALPTYnfsE6PN1aBifPcylXXW+mK6evPeakR6cbTr81j1m/7tB3D10ZAExjPvO1Wc/ZWVU7VNVDkuzTWntCa23PJHun+yvhHq21vxrXoBMAAKDvKXls/3Ryj82J569Ncv15uPzknptnZBZBZ++sEbcDgCVjmNXYr0jy/iRHJflOkrTWzhtlUQAAAItla/Nw9vv2n+rYXiW5Qbp
OIaPwgyQv7mv5QpJ7bqG2mTg1XUg63fD6iWHxpw5bMAAsllmHna21VlU/yZZXYwcAAFh2ppuHc+0xJx010Hty3xmebq8RlPTh9esO/ZNJ206ZywnXrzt009pjTjoq3TD8lqmH4R9tcSIAlqNZD2Pv/VOSv6mqm4+yGAAAgMUyMA/nfpN27ZfkhLXHnPSwvtfnTHtsbp5lCS9K8m9JPtl/3HGKoHMk+uD28CRnTto1zLB4AFgyhl2N/V+S3CfJzdL9VXF9kksnNWuttaPmWN9Manlqkmcm2SfJt5M8rbX2lS20/9N0Q0DWJvlJkme11j42i+tZoAgAAMbMDFZOb0l+neSybHkI+4RNSR6Y5L9nWMLL16879FkzbDsyMxiyDwAjt+RWY6+qmfyFsrXWVs6+pFnV8cgk70zypCRfTnJ0kj9NcvOpFkmqqnsk+VySZyf5rySPSfKsJHdorX1vhtcUdgIAwJhZe8xJB2f0K6ffJ8lxmT5ATbren49av+7Q94/42gCwZC2p1diTpLW2YgaPeQ06e09P8pbW2ttba99PF3pekuSvpml/VJJPtNZe0Vr7QWvteUm+keRvFqBWAABg6ZrpPJyzsXe630GSq+fCzMDzluSRgk4AGJ1h5+xcdFW1fZI7Jvn0xLbW2ub++d2nOezug+17n9xC+1TVDlW1euKRZLc5FQ4AACxFZ83DOc+bwdyYJ8zDdQFgmzWnsLOqDqiqp1TVP/ePp1TVAaMqbiuuk2RlknMmbT8n3fydU9lnlu2Tbsj7hoHHGbOuFAAAmJmq66Tqman6v1T9JlXnpeqEVN07VdMNBR+FU9P9X3/283xN79bJVYsBrU1ySLqptA5JcoBFgABg9FYNe2BVvSrdkIzJgenmqnpta+3/zamypeNlSV498Hy3CDwBAGD0qu6V5ENJdtqcOuHr+93is1esWnXd2/3qx3fa5XeXfSbJCan687R2+agvvX7doZvWHnPSUelWY2+Zfo7N2bjx4PnTLe4KAMyjocLOqnpGkr9L9x+BVyX5Qb/rFv32v6uqM1trrxlJlVM7P90Kh3tP2r53krOnOebsWbZP6/4jddV/pub3j8kAALCNqvr9JCcl+fLD/uwV//GN/W/x4kyset5aHvp/J//6lR97zWErW3trkj+fx0ouSLLXiM71sxGdBwCYoWFXY/9hkh+21v5kmv0fSnJga+3AOVW39Tq+nOQrrbWn9c9XJPllkte11tZN0f69SXZurT14YNsXk3yntfakGV7TauwAADBqVe9Lcvt7Pumtzz9zzd7vmtg60KI94jufyss//i+V5LZp7TtzudzaY07aLslTk9wkXSj5qyTvmeK6w9qUZKf16w793QjOBQBjZT7ztWGHsa9NcuwW9n8yyQOHPPdsvDrJcVX1tSRfSXJ0kl2SvD1JquqdSc5srT27b39sks/2PVNPSvKoJHdK8tcLUCsAADCVqn2SPPTKWvGMM9fs/fKJrZNbnXire7dnfvadm/a6ZMOTVyRPns0l1h5z0sokB6Vbdf3BSR6Rbg2AQaMavp4krxJ0AsDCGzbsPDfJbbew/7ZJzhvy3DPWWntvVV03yYvSLTL0rSQPbK1NLEJ0wySbB9p/saoek+QlSf4pyU+S/Elr7XvzXSsAADCt2yZZ9cwHHX1uJoauT+HKlavqMze9y8oH/egLh6yexcnXHnPSw9J1fJj23L1RBJ0tySvWrzv0WSM4FwAwS8OGne9PclRVrU/yr6213yZJVe2S5G+SPCHJa0dR4Na01l6X5HXT7Dt4im3vT1c/AACwNKxKkjNXX3ePrTX83cpVuXLFih1neuI+6DxhDrXNxCVJfprkP5Icq0cnACyeYcPO5yW5XbrekS+qql/126/fn/PkJM+fc3UAAMC24CdJcsTXP7rPV2546+lbtZY7n/5/uWzVDj+fyUn7oesT02/Nx0qjlyR57Pp1h853mAoAzNBQYWdr7ZIk96mqw5L8UZIb9bs+keRjST7
ahln5CAAA2Pa09uNUfe6PfvzF+6S1M1K1X6YIJ+/+y++2A8//RV22crtrLEbah5r3SnJwv+mUJJ9NN0fn1oauz8Zvk/wiydeTHJfklPXrDt00wvMDAHM01Grs2zKrsQMAwDyoemCSj39nn5v+12F/+epDW61IBgLPm/z69Pau9/xDbbfpyp/sdenGA9Pa5uSqYepvTrLXpDP+Osk7k/zdHCub+IXpBUn+SbgJAHM3n/naUGFnVR2Q5Pdbax+dZv+Dk3y3tbZ+buUtPcJOAACYJ1VPTfKvG3fY5cxj7/no3b+178123fWKS3PoD0/NYd//bLt81fZnrL78t3dNa2cl15iPcz6GqE84PcnR69cdeuI8XgMAtilLMez8QJLVrbX7TbP/E0kuaq09ao71LTnCTgAAmD+vOugvnnPbs3783IN//rWdVnWdN/Or3a6z+Wd77f/ug9Z/66lpbUNy1dD19dn6MPVNSVZk5oHoK5OclGTfJGclOVVvTgAYrfnM14ZdoOju2fJq659JcvSQ5wYAALZBa4856WG556NekiSrL7s41734wlyxarucufq6tXnFysckOXF9MtHDcqbzca5MNxS9ZcuB56Ykr1q/7tBnDf8ZAACLbcWQx+2R5Ddb2H9xrj1nDgAAwJQmr5y+ccdd87Pr3CCn775PNq9YORFSvrZvl3Q9L2fqtUnOnLTt9CRvS/K6dPN67iToBIDlb9ienb9Mcs8kb5hm/0FJzhjy3AAAwLZnaz01K8kN+nanpBtiPlMfSfLM/ljD0wFgjA0bdr47yfOq6itJXtf6lRCramWSv0nyyCQvHU2JAADActL3vpxtsDjTnpoT7U5N18Fia0PZTx+4/ikzvAYAsEwNO4z9ZUlOTjcc5Kyq+lxVfS7Jr5K8JslnI+wEAIBtTr9C+vp0vy8c339c32/fkpn21DwrSfrw8qh0c3FuydF6cALAtmOo1diTpKpWJHlskocluUm/+WdJPpDknRO9PceN1dgBAGBqfaB5Qv90cDGgiV86Dl+/7tATp+r52e9fn2S/TL2QUEvXk/OAwfCyv+abc+01A85PcuT6dYeeGABgSZnPfG3osHNbJewEAIBr6wPM9dl6WPn0dKPBBoefn5Gul2Yyg7B0mmvfK8nB/aZTknxWj04AWJqWfNhZVdsnuWu6v8z+qLX27TmfdIkSdgIAwLWtPeakg9MNWd+aiV9Apgwz+4/H5pph6OnphqPrpQkAY2A+87UZL1BUVQ9It/DQ37fWzh/YfmC61Q1vMrDtg0ke1Vq7coS1AgAAS9dMFxhKrt3zs9IFnq9NckCSD8fK6QDAEGazGvtfJbnpYNDZe1eSmyY5LsnXkjwoyUOTPC3d8BQAAGD8zXSBoamGuE9sv0GSg9avO/SUWDkdABjCbFZjv1OSTw9uqKrbJ7l9kne11h7XWvu31tqhST6f5M9GVyYAALDEnZpu7s3p5sma6fxZs+khCgBwDbMJO/dJ8tNJ2x6Y7j8t75i0/UNJbj50VQAAwLLSDzOfWGRocrDZMn2Pzslm2kMUAOBaZhN2Xpxk50nb/iDJ5iRfnrT9oiQrhy8LAABYbvoFhA5PcuakXWck+dNsvefn6el6iAIADGU2YecPkhw28aSq9kjyh0m+2Fq7eFLbGyQ5e+7lAQAAy0kfeK5NckiSx/QfD1i/7tATsuWen0m34rqFiACAoVVrM5s6p6oenG5VxE8m+WKSBye5Y5LHtNbeO6ntF5Oc1Vp7+GjLXXxVtTrJhiRrWmsbF7seAABYTtYec9LDkhybZP+BzaenCzpPXJyqAICFNJ/52ozDzr6Q/5fkuUlWJ7k0yStaa/84qc3d0oWhT2itvW10pS4Nwk4AAJibtcectDLJQekWIzoryal6dALAtmPJhJ19MSuTXCfJuW2Kg6tqp3Rze17UWhu7/7AIOwEAAABgePOZr62a7QF9gHnOFvZfmq7XJwAAAADAgpnNAkUAAAAAAEuWsBMAAAAAGAv
CTgAAAABgLAg7AQAAAICxIOwEAAAAAMbCrFdjH1RVd0tySJLrJXl9a+0nVbVzkgOT/Li1dvEIagQAAAAA2KqhenZW1fZVdWKSLyR5aZK/TXKDfvfmJJ9KctRIKgQAAAAAmIFhh7G/OMkfJ3lykpsnqYkdrbXLkrw/yWFzrg4AAAAAYIaGHcb+6CRvaK29uar2mmL/D5L86fBlAQAAU1l7zEkrkxyUZN8kZyU5df26QzctblUAAEvDsD07r5fku1vYvynJzkOeGwAAmMLaY056WJL1SU5Ocnz/cX2/HQBgmzds2Hl6ukWIpnPPJD8d8twAAMAkfaB5QpL9Ju3aL8kJAk8AgOHDzuOTHFlVdx/Y1pKkqp6Y5BFJ3jnH2gAAgFw1dP3Y/mlN2j3x/LV9OwCAbdawc3a+NMndknwu3fycLclrqmrPJPsn+ViS14ykQgAA4KB0/8+eTiW5Qd/ulIUoCABgKRqqZ2dr7YokD0zyuCQ/T/LDJDsk+U6SI5I8uLVmknQAABiNfUfcDgBgLA3bszOttZbkP/sHAAAwf84acTsAgLE0VM/Oqtqzqm6zhf23rqo9hi8LAAAYcGqSM9LPkz+Flm4R0VMXrCIAgCVo2AWKXpPkzVvY/6Ykrxzy3AAAwID16w7dlOSo/unkwHPi+dF9OwCAbdawYee9k3xkC/s/muS+Q54bAACYZP26Q09McniSMyftOiPJ4f1+AIBt2rBh53WTnL+F/b9Ocr0hzw0AAEyhDzTXJjkkyWP6jwcIOgEAOsMuUHRWkttvYf8dk5w35LkBAIBp9EPVT1nsOgAAlqJhe3Z+KMnjq+ohk3dU1WFJHpfkg3OoCwAAAABgVqq16RZ03MJBVWuSfD7JLZN8O8n3+l2/n+S2SX6Q5A9aaxeNpsylo6pWJ9mQZE1rbeNi1wMAAAAAy8l85mtD9exsrW1IcrckL0myXbqJ0g/v//3iJHcdx6ATAAAAAFi6hurZuS3TsxMAAAAAhrfkenYCAAAAACw1w67Gnqq6RbqFiG6cZI8kNalJa63dZw61AQAAAADM2FBhZ1X9RZK3J/ldkh8luXCqZnOoCwAAAABgVobt2fmPSb6Z5I9aa+ePrhwAAAAAgOEMO2fn9ZO8TdAJAAAAACwVw4ad30kXeAIAAAAALAnDhp1PT/L4qrrHKIsBAAAAABjWsHN2PivJhiSnVtX3k/wyyaZJbVpr7bC5FAcAAAAAMFPDhp23SdLShZy7JrnlFG3asEUBAAAAAMzWUGFna23tiOsAAAAAAJiTYefsBAAAAABYUoYOO6tqZVU9qqreVFUfrKpb99vXVNXDqmrv0ZUJAAAAALBlQ4WdVbV7ki8kOT7Jo5M8JMl1+90XJ/mXJEeNoD4AAAAAgBkZtmfnuiS3SvKAJDdOUhM7WmubkpyQ5EFzrg4AAAAAYIaGDTv/JMm/ttb+O1Ovuv7jJGuHPDcAAAAAwKwNG3auSXLaFvZvlyFXegcAAAAAGMawYefPktxhC/vvn+T7Q54bAAAAAGDWhg07/z3JX1XVI3P1fJ2tqnaoqpcmeWCSN42iQAAAAACAmRh2qPmx6RYoeneSi/ptxyfZqz/nm1prb51zdQAAAAAAM1StTbW+0AwPrvqDJIcn+b10vUR/luR9rbXPjaa8paeqVifZkGRNa23jYtcDAAAAAMvJfOZrc1pEqLX2+SSfH1EtAAAAAABDG3bOTgAAAACAJWWonp1VdVqSrY1/b621mwxzfgAAAACA2Rp2GPtnc+2wc2WSGyW5Z5LvJfnmHOoCAAAAAJiVocLO1toR0+2rqtsm+WSSdw1ZEwAAAADArI18zs7W2reTvCnJP4/63AAAAAAA05mvBYrOSXLLeTo3AAAAAMC1jDzsrKq9kjw+yRmjPjcAAAAAwHSGXY39f6bZtXuSA5Nsn+QvhqwJAAAAAGDWhl2NfUWuvRp7S3Jakk8neVtr7YdzKQwAAAAAYDaGXY394BHXAQAAAAAwJ/O1QBE
AAAAAwIKaUc/OqvrLYU7eWnvnMMcBAAAAAMxWtTZ56s0pGlVtHuLcrbW2cojjlrSqWp1kQ5I1rbWNi10PAAAAACwn85mvzXTOzgNGeVEAAAAAgFGbadh5VJL/aK19M0mq6oZJzmutXTpvlQEAAAAAzMJMFyg6OsktBp6fluShI68GAAAAAGBIMw07z0ly44HnNQ+1AAAAAAAMbabD2E9K8vyqun+Si/ptz6iqR23hmNZaO2wuxQEAAAAAzNRs5uw8N8khSW6VpCW5QZI9t3DM1pd5BwAAAAAYkWpt9plkVW1O8uetteNHX9LSVlWrk2xIsqa1tnGx6wEAAACA5WQ+87WZ9uyc7JAk3x9lIQAAAAAAczFU2Nla++yoCwEAAAAAmIuZrsZ+DdU5sqq+UlXnV9WmKR5XjrpYAAAAAIDpDDuM/eVJnp7kW0n+M8mFoyoIAAAAAGAYw4adj03ygdbaI0ZZDAAAAADAsIYaxp5kpySfHmUhs1VVe1bVu6pqY1VdVFVvrapdt3LMKVXVJj3euFA1AwAAAADzZ9iw8zNJ7jzKQobwriS3SnK/JH+c5A+TvHkGx70lyb4Dj7+frwIBAAAAgIUzbNj5lCR3q6rnVNVeoyxoJqrqFkkemOQJrbUvt9Y+n+RpSR5VVdffyuGXtNbOHnhsnPeCAQAAAIB5N2zY+aMkN07y4iTnVtVv++Hkg48NoyvzWu6e5KLW2tcGtn06yeYkd93KsX/WryD/vap6WVXtvKXGVbVDVa2eeCTZbW6lAwAAAADzYdgFij6QpI2ykFnaJ8m5gxtaa1dW1QX9vukcn+QXSX6V5DZJ/jnJzZM8bAvHPDvJC+ZULQAAAAAw74YKO1trR4y4jiRJVa1L8qytNLvFsOdvrQ3O6fndqjoryWeq6iattZ9Nc9jLkrx64PluSc4YtgYAAAAAYH4M27NzvrwqyTu20ubnSc5Ocr3BjVW1Ksme/b6Z+nL/8aZJpgw7W2uXJ7l84DqzOD0AAAAAsFBmHHZW1R1me/LW2jdm2f68JOfNoJYvJdm9qu7YWvt6v/ne6eYg/fL0R17L7fqPZ82mTgAAAABg6anWZjb1ZlVtzszn6awkrbW2ctjCZlDPx5PsneRJSbZL8vYkX2utPabfv1+SzyT5y9baV6rqJkkek+RjSX6dbs7O1yQ5o7V2r1lcd3WSDUnWWMkdAAAAAGZnPvO12Qxjf9woLzwCf5bkdekCzc3pFk3624H926VbfGhitfUrktw3ydFJdklyen/MSxamXAAAAABgPs24ZycdPTsBAAAAYHjzma+tGOXJAAAAAAAWi7ATAAAAABgLwk4AAAAAYCwIOwEAAACAsSDsBAAAAADGgrATAAAAABgLq+ZycFXdMMmNk+yRpCbvb62dOJfzAwAAAADM1FBhZx9yvi3JIRObpmjWkqwcsi4AAAAAgFkZtmfncUnunmRdki8n2TCyigAAAAAAhjBs2Hm3JP/cWnvBKIsBAAAAABjWsAsUnZHkwlEWAgAAAAAwF8OGna9M8viq2nmUxQAAAAAADGuoYeyttTdV1cokP6mqE9L19Nx07WbtNXMtEABYBFUHJnlyknsl2THJ6UnekeSEtHb5IlYGAAAwrWqtzf6gqt9P8l9JbriFZq21NnarsVfV6nQLMq1prW1c7HoAYKS6P2a+KslRLTnvF7vv++ULdl5day886/p7Xrrx9kl+meTBae07i1soAACwXM1nvjbsAkVvTrImyZGxGjsAjJNXJHnayTe+49uPfOg/3O+KVdv/8cSOm5+7/uz3vPuYTXtcdvGnU3XXtHbaItYJAABwLcP27LwkyQtaa68YfUlLm56dAIytqpsm+clnD7jDcY99xIv+cmLrQIu2+6Ub86XXH3HBTlde8dG09rhFqBIAAFjm5jNfG3aBIj05AGD8PKklFxz50Ofct39ek/bXRTutzhvveviqljwqVXstdIEAAABbMmzY+YIkT62qG4yyGABgUR10+pq
9v3rZdjvul2sHnRPqg7c6ZE11ixbdcQFrAwAA2Kph5+z8wyQXJflRVX063QqtU63GftQcagMAFtYOG3bcdat/CL10ux0n/rn9/JYDAAAwO8OGnX8z8O8/nqZNSyLsBIDlY/0NLzr7ZltrdOuzfzLxz1/ObzkAAACzM9Qw9tbaihk8Vo66WABgXr19zeW/vcUdzvzBOen+aDmV9vivfuiylnw9yXcXsDYAAICtGnbOTgBg/JyU5IfHve8FV17vN79Orh14tid++cTc85ff2bGSV6a16QJRAACARVFz+T2lqu6W5JAk10vy+tbaT6pq5yQHJvlxa+3i0ZS5dFTV6iQbkqxprW1c7HoAYKSqbprk5MtXbrf6uDv+cU681b1XX7z9TrnVuT/P4772kcvudvr3dkzy0rT23MUuFQAAWJ7mM18bKuysqu2TvCfJYelWa21J7tda+5+q2jHJGUle01p76SiLXQqEnQCMvarrJ3l2Sx5byW4Tm1vyv5W8Kq2dsIjVAQAAy9x85mvDDmN/cbqFiZ6c5ObpAs8kSWvtsiTvTxeEAgDLTWu/SmtPq+T6Sf4wyf2T3KJau7ugEwAAWMqGXY390Une0Fp7c1XtNcX+HyT50+HLAgAWXTcdzamLXQYAAMBMDduz83rZ8gqsm5LsPOS5AQAAAABmbdiw8/R0ixBN555JfjrkuQEAAAAAZm3YsPP4JEdW1d0HtrUkqaonJnlEknfOsTYAAAAAgBmby2rsH01y73Tzc94q3bD2PZPsn+RjSQ5rrW0aXalLg9XYAQAAAGB4S2419tbaFUkemORxSX6e5IdJdkjynSRHJHnwOAadAAAAAMDSNVTPzm2Znp0AAAAAMLz5zNdWzfUEVXXLJDfqn/6itfb9uZ4TAAAAAGC2hg47q+qwJK9OsnZiU5JWVacleXpr7SNzLw8AAAAAYGaGmrOzqh6U5AP90+ckeWiSP+n/XUlOrKoHjqJAAAAAAICZGHY19i+lW5DooNbabyft2yXJ55Nc1lq7+0iqXELM2QkAAAAAw1tyq7EnuU2S4yYHnUnSb3tH3wYAAAAAYEEMG3ZelmTPLezfs28DAAAAALAghg07/yfJUVV1rWHqVXXXJH+b5NNzKQwAAAAAYDaGnbPzgCRfSnLdJF9J8qN+182T3CXJuUnu3lpbP5oylw5zdgIAAADA8JbcnJ2ttdPSzcn5L0n2SPLI/rFHkmOT3HYcg04AAAAAYOmadc/OqtoxyV8n+VZr7XPzUtUSpmcnAAAAAAxvSfXsbK1dluSf0w1ZBwAAAABYEoZdoOh7SdaOsA4AAAAAgDkZNuz8hyRHVtV9R1kMAAAAAMCwVg153N8kuSDJJ6vqtCSnJbl0UpvWWjtsLsUBAAAAAMzUsGHnbZK0JL9MsjLJTUdWEQAAAADAEIYKO1tra0dcBwAAAADAnAw1Z2dV/WFVXXcL+69TVX84fFkAAAAAALMz7AJFJye53xb236dvAwAAAACwIIYNO2sr+3dIsmnIcwMAAAAAzNqM5+ysqhsmWTuw6cBphqrvnuTIJL+YU2UAAAAAALMwmwWKHpfkBelWYW9J/qF/TFbpenUeOefqAAAAAABmaDZh5/uSfC9dmPm+JP+S5NRJbVqS3yb5VmvtnJFUCAAAAAAwAzMOO1trP0jygySpqscl+Vxr7bT5KgwAAAAAYDZm07PzKq2146baXlU3TrJDH4wCAAAAACyYoVZjr6qnVdV7Jm17e5KfJPleVX2tqq43igIBAAAAAGZiqLAzyROTXDUnZ1U9IMljk7w5ydOS3DjdYkYAAAAAAAtiqGHsSW6Ufv7O3iOSnNZae3KSVNU+Sf5ijrUBAAAAAMzYsD07a9Lz+yf5+MDz9Un2GfLcAAAAAACzNmzY+eMkD02uGsJ+/Vwz7Nw/yUVzqgwAAAAAYBaGHcb+yiTHV9WFSXZJN6T9kwP7753kW3MrDQAAAABg5oYKO1tr76mqXyd5ULoenK9vrV2ZJFW
1Z5ILkvzHqIoEAAAAANiaaq0tdg3LSlWtTrIhyZrW2sbFrgcAAAAAlpP5zNeGmrOzqj5eVY+pqp1GWQwAAAAAwLCGXaDoxkn+M8k5VXVcVd23qiav0A4AAAAAsGCGCjtbazdPctckb09y/3SLE51RVa+oqtuNrjwAAAAAgJmZ85ydVbUiyf2S/HmSw3L16uzvTHJ8a+2MuRa5lJizEwAAAACGN5/52kgXKKqq3ZO8Kcmf9ps2JzklyWtaayeN7EKLSNgJAAAAAMNbcgsUTVZVf1BVb0zy03RB5/eS/H2SZyS5bpKPVNWLRnEtAAAAAICpDN2zs6pumW7o+qOT3DDJuUmOT/IfrbVvTWr75iQPb63tNadqlwA9OwFYMFU3TvJnSa6f5NIkn0nyibS2aVHrAgAAmIP5zNdWDVnQt5LcOsnlST6c5ClJPtla2zzNIScnecIw1wKAbU7VnknemuSwlvzmN9vvfPaKtN13veLSv2vJaVX1lLT2icUuEwAAYKkZKuxMclGSv07y/hmmrx9OcsCQ1wKAbUc3//UpSfb96IEHvf7v/+ioP7l0+x1vltZyu7N+nL//7HH73u2X3zlpRdVD09pHFrdYAACApWWkCxRtCwxjB2BeVR2b5LHP/KOjnv/+29zvtRNbJ3av2LypveFDL6t7/+xrv91u85V7p7XfLkqdAAAAQ1ryCxQBACNQtWuSIzZVvf79t7nfMye2DjbZvGJlveTeT2grN2/a5XcrVj564YsEAABYumY8jL2qvjPLc7fW2m1neQwAbMsOTrL6H+7/1P9Lsv90jU7ffZ/64o1uk5ud/8vHXy/59wWrDgAAYImbzZydFySZyZj3fZLcfIZtAYCrrUmSk29y562+P5+z65652fm/3GP+SwIAAFg+Zhx2ttYO3tL+qtonybOSHJlkU5L/mFNlALDtuSBJ/uhHX2jvuNNDtthw/43npaXOX5CqAAAAlok5z9lZVXtX1WuS/CzJU5O8J8mBrbW/muu5AWAbc0qSC573P2+5bZIzMs0oiRv/+ox219O/lz0v3fCmhSwOAABgqRs67KyqffqQ8+fpQs73pg85W2s/G1WBALDNaO3SJG9Z2dqTHv/VD/3LxNbBJttf+bv2wk+/sS5fud3G7TZvev/CFwkAALB0VWuzm1qzH65+TJInJtku3XD1l7TWTht9eUtPVa1OsiHJmtbaxsWuB4AxU7Vzks8k+f3Prb39iUc/+P/d94Kd11y/2ubc6+ffyNM//59X3Oqcn9XK1h6Q1k5e7HIBAABmaz7ztRmHnVW1b64OOVcleWeSl24rIecEYScA865q1yTHJvnzluTyVdtfUG3zrjtsunLXlnyrkr9Ja19Y5CoBAACGslTCzkuS7JDkW0n+KclWQ87W2jfmUtxSJOwEGIGqvZIckeTPk+yT5LdJPpXkDWntu4tY2dJSdd0kj0iyX5JLk3w6yf9mtsMyAAAAlpClEnZuHni6tYMqSWutrRy2sKVK2AkwR1X3T3JCkh02p0783j43uXRTrdjnluf+/C47bLpyrySvTPKstLZ5yycCAABgOZrPfG3VLNo+bpQXBmAbVHXXJB9J8pmH/dkr3v+N/W/x4iT7J8mqTVfmyC9/YMMzT/2P/5fksiTPW8RKAQAAWIZmvUDRtk7PToA5qPpMkj3u8LR3rbtg5zXvmdg60KId/fl31d9+4T1Xrki7QVo7exGqBAAAYB7NZ762YpQnA4BpVd08yb1/t2Llqy7Yec2rJrZObvW2Oz2kXbFqu5WbasUTFrhCAAAAljlhJwAL5W5Jct8nvOH8dEPXJwedSZKNO+5WX7zhberXO6950EIWBwAAwPIn7ARgoaxKknN32fM6W2t4xart8ruVq3aZ/5IAAAAYJ8JOABbKz5PkqC8cv/uWGq3cvCm3PuunuXzl9qctSFUAAACMDWEnAAvls0l+/tdf+eBBSc5IMuUKeff7yZfbfr85L3tf/OuXLGh1AAAALHvCTgA
WRmubk/zzirRHvuUDL/rExNbBJjc/d3176SdfV2fvutd3drni0q8tfJEAAAAsZ9XalB1rmEZVrU6yIcma1trGxa4HYFmpqiSvSPKMc3bZ43svv9cR+35/7wP22v3Si3PY90/JQ//v5HbJdjus3+Oyi++c1n692OUCAAAwevOZry3bsLOq/iHJoUlul+SK1truMzimkrwwyROT7J7kC0me3Fr7ySyuK+wEmKuqhyc5KslBE5suW7ndudtt3vSvK9vm16a1ixevOAAAAOaTsHMKVfXCJBcl2T/J42cYdj4rybOTPDbJaUlenOTWSW7ZWrtshtcVdgKMStWNkuyd5JIkP0xrVy5yRQAAAMwzYecWVNURSV67tbCz79X5qySvaq29st+2Jsk5SY5orb1nhtcTdgIAAADAkOYzX9uWFig6IMk+ST49saG1tiHJl5PcfbqDqmqHqlo98Uiy27xXCgAAAADM2rYUdu7Tfzxn0vZzBvZN5dnpkuaJxxmjLw0AAAAAmKslFXZW1bqqalt5HLjAZb0syZqBx/4LfH0AAAAAYAZWLXYBk7wqyTu20ubnQ5777P7j3knOGti+d5JvTXdQa+3yJJdPPO+m/gQAAAAAlpolFXa21s5Lct48nf60dIHnfdKHm/0cnHdN8oZ5uiYAAAAAsECW1DD22aiqG1bV7ZLcMMnKqrpd/9h1oM0Pq+qhSdK6Zedfm+S5VfWQqrp1knemW6H9QwtdPwAAAAAwWkuqZ+csvSjJYweef7P/eEiSU/p/3zzdPJsTXp5klyRvTrJ7ks8neWBr7bL5LBQAAAAAmH/VdXhkpvqh7xuSrGmtbVzsegAAAABgOZnPfG3ZDmMHAAAAABgk7AQAAAAAxoKwEwAAAAAYC8JOAAAAAGAsCDsBAAAAgLGwarELABhbVTdIcuskleQHae3ni1wRAAAAjDU9OwFGreouqfpokl8kOSnJfyX5Wao+lap7LW5xAAAAML707AQYpaoHJzkhyY+vXLHySU978DMvvHCn1dd9/Nc+tN99f/qV+1fymVQ9Nq29a7FLBQAAgHFTrbXFrmFZqarVSTYkWdNa27jY9QBLSNWNkvwgySdu+7fvfs+GnXZ7VZL9J3av2LzpjE+87W9+fLNfn/6HSe6Q1r67WKUCAADAYpnPfM0wdoDReXKSK+75pLe+f8NOu70nyX6DOzevWLnfgx73r4dcumr7DUmetigVAgAAwBgTdgKMzhGbq9555pq9X94/r0n768qVq/K2Ox22qiV/lqrtF7pAAAAAGGfCToBRqNouyd6nHHDHS9INXZ8cdF7V8pvXP3BNJTsn2XPB6gMAAIBtgLATYDQ2Jdl0xcrt9tlaw12uuGTin5fNa0UAAACwjRF2AoxCa5uTfO6up3/vDltr+sc//Hwu3n6nn6WbjBkAAAAYEWEnwOi8fo/LfnPrg3/21fOStKka3OasH7d7/+wr2fHKK16R1qZsAwAAAAxH2AkwOh9K8t//fuJLdvvjH3wuKzZvuirMrLY59/npl9tx73tBXbTT6h+v2rzpuMUrEwAAAMZT6Vg0O1W1Ot3Q0zWttY2LXQ+wxFTtkuRdSQ771W7X2XTKje+4slXl7r/4Tm584a9yzq57fnfviy+4V1q7cLFLBQAAgMUwn/masHOWhJ3AjFTdeXPqyRfvsNM9r1yxartLttvxp3tdctFLdrryilMNXwcAAGBbJuxcQoSdAAAAADC8+czXzNkJAAAAAIwFYScAAAAAMBaEnQAAAADAWBB2AgAAAABjQdgJAAAAAIwFYScAAAAAMBaEnQAAAADAWBB2AgAAAABjQdgJAAAAAIwFYScAAAAAMBaEnQAAAADAWBB2AgAAAABjQdgJAAAAAIwFYScAAAAAMBaEnQAAAADAWBB2AgAAAABjQdgJAAAAAIwFYScAAAAAMBaEnQAAAADAWBB2AgAAAABjQdgJAAAAAIwFYScAAAAAMBaEnQAAAADAWBB2AgAAAABjQdg
JAAAAAIwFYScAAAAAMBaEnQAAAADAWBB2AgAAAABjQdgJAAAAAIwFYScAAAAAMBaEnQAAAADAWBB2AgAAAABjQdgJAAAAAIwFYScAAAAAMBaEnQAAAADAWBB2AgAAAABjYdViFwAsYVUrkxyc5IAkv0vypbT240WtCQAAAGAawk7g2qoqyZOTPDPJ2kn7Pp3kOWntqwtfGAAAAMD0DGMHrqkLOv8lyb8l+fwlq3a4x82eceK97/qU44741O/d7aUtuW6Sz6XqPotbKAAAAMA1VWttsWtYVqpqdZINSda01jYudj0wclWPTnJ8kiPXPuu/zk9ybJL9J3bv8LvLzzjlLUeev+9vzj8gydq0dtHiFAoAAAAsR/OZr+nZCUx2VJL/7oPOE5LsN7jz8u122O+wv3z1bTendk3yl4tRIAAAAMBUhJ3A1apumuSuV6xY9eZ0PTqTpCa3OnfXPfM/N7nzFS3584UtEAAAAGB6wk5g0D5J8k+H/NXO6YauTw46J9T3r3fATpev3O5GC1YZAAAAwFYIO4FBv02SHa+8/MZba7j7ZRfnilXbXzn/JQEAAADMjLATGPS9JGc94jv/ffstNdr+yt/l0B+emnN23fNLC1QXAAAAwFYJO4Grtfa7JG8+4MJf3fcW5/z87CRtqmZP+OoH216XbszeF//6+QtbIAAAAMD0hJ3AZK+u5Kcf+o9n7PTAH30xKzdvuirw3P3SjXnmZ4/L33/unfW1/W7xgdWX/fb7i1koAAAAwKBqbcqOW0yjqlYn2ZBkTWtt42LXA/Oi6rpJ3p3kPmfvuuemb+97s5U7XPm73P2X30m1lq/vd4t33/307/5Z/AABAAAAZmk+8zVh5ywJO9mmVN1xc+rxv955zR0v2W7HlefvsvvX9/3N+c+//sbzzlns0gAAAIDlSdi5hAg7AQAAAGB485mvmbMTAAAAABgLwk4AAAAAYCwIOwEAAACAsbBqsQuAsVB1QJInJTk4yY5JzkhyXJIPprXfLWJlAAAAANsMPTthLqoqVf+U5Gct+evT1+x94Xf2uenZF+642w2SvDfJj1J1q0WuEgAAAGCboGcnzM1Lkjz7Cze8zbue8PDnH3zp9js+YGLHrc7+2Tnves8/rNj98otPTtVd09ppi1gnAAAAwNir1tpi17CsVNXqJBuSrGmtbVzselhEVTdK8vMv3fDW7330o1/2qImtAy3aHpdsyBff8LgLdrryiv9Ka0csfJEAAAAAS8t85muGscPwjmzJxY9/+PP/sH9ek/bXhTuvyRvvevjKljwqVXstdIEAAAAA2xJhJwzvXmeuvu5XLtl+p/1y7aBzQn3g9++9eyU7JLnLAtYGAAAAsM0RdsLwdtiw427ThZxX+e32O13Vfn7LAQAAANi2CTtheKfvv+Gc/bbW6Fbn/Pyq9vNbDgAAAMC2TdgJw3v7mst/e+AdzvzBOUmmW+mrPf5rH760Jd9N8o0FrA0AAABgmyPshOGdlORHx73vBVde9+ILk2sHnu0vv/FfOeTnX9upklentekCUQAAAABGoOQvs1NVq5NsSLKmtbZxsethkVXdLMnJl63cbue33vlPVnzg9++z+uLtd8otzz0tf/W1D1/6h+u/uVOS1yZ5urATAAAAYH7zNWHnLAk7uZaq/ZM8tyV/XskuE5tb8u1KXpXkPwWdAAAAAB1h5xIi7GRaVWuS3DnJjukWI/qOkBMAAADgmuYzX1s1ypPBNq21DUk+vdhlAAAAAGyrLFAEAAAAAIwFYScAAAAAMBaEnQAAAADAWBB2AgAAAABjQdgJAAAAAIwFYScAAAAAMBaEnQAAAADAWFi2YWdV/UNVfbGqLqmqi2Z4zDuqqk16fGKeSwUAAAAAFsCqxS5gDrZP8v4kX0ry+Fkc94kkjxt4fvkoiwIAAAAAFseyDTtbay9Ikqo6YpaHXt5aO3v0FQEAAAAAi2nZDmOfg4Or6tyq+lFVvaGq9tpS46raoapWTzyS7LZAdQI
AAAAAs7CthZ2fSPKXSe6T5FlJ7pXk41W1cgvHPDvJhoHHGfNdJAAAAAAwe0sq7KyqdVMsIDT5ceCw52+tvae19pHW2ndbax9K8sdJ7pzk4C0c9rIkawYe+w97fQAAAABg/iy1OTtfleQdW2nz81FdrLX286o6P8lNk3xmmjaXZ2ARo6oa1eUBAAAAgBFaUmFna+28JOct1PWqav8keyU5a6GuCQAAAADMjyU1jH02quqGVXW7JDdMsrKqbtc/dh1o88Oqemj/712r6hVVdbeqWltV90ny4SQ/TfLJxfgcAAAAAIDRWVI9O2fpRUkeO/D8m/3HQ5Kc0v/75unm2UySTUlu0x+ze5JfJflUkuf1Q9UBAAAAgGWsWmuLXcOyUlWr063Kvqa1tnGx6wEAAACA5WQ+87Xl3LMT5q6b9uDRSW7db/lukvektd8sXlEAAAAADGPZztkJc1JVqXpukl+15I0Xb7fjgy/acdeHtuSNLTkzVc9NVS12mQAAAADMnJ6dbHu6EPPYJE/79j6/95Gn/skxdz5jzd5rk2TfjeflyC9/IEd8479enGSfVD0t5noAAAAAWBbM2TlL5uwcA1X3SfLpT/7e3d585MOe+8SJrQMt2mO++fH806f+rZLcL619euGLBAAAABhP85mvGcbO0tANK98jVddL1Xz3OH5qS7535EP/4UETV59czfG3e2B+stcNfrc5eeo81wIAAADAiAg7WVxVu6XqqCQ/SHJBknOSnJ+q16bqpvNwvVVJHvLV/W/12VTtn2sHnRPt6j23fcB2lTxkAcJXAAAAAEZAiMPiqdovyaeS3Gxz6sRP3Ozu7z17t+vsevdffmffA89b/2eVPDFVh6e1j4/wqrskWfnj69xw09YanrXbdVLdHwR2TXLRCGsAAAAAYB4IO1kcVdslOSnJbk8/9OlPP/H37/33SR4xsXunKy4787/f+pTT9t947gdSdde09t0RXfniJL878Lz122+t4X4bzs3mZNOK7hgAAAAAljjD2FkshyW57UsOefzrTvz9ex+bZL/BnZduv+P17/OEN9zpt9vt+Jsk/29kV21tU5IP3vHMHxyS1s5IMvUKXa21x3z741cm9cG0duXIrg8AAADAvBF2slj+uiVf+Pe7PPRp/fNrLRJ0+XY75E13ffj2LXlkqnYf4bVfV8nNj3v/Cz7TP58ceLYnfPVDOeDCs1atSPvXEV4XAAAAgHkk7GSx3Op7e9/kp0mmXyQoqf+5yZ13r2SHJDcZ2ZVbOzXJy+512jce+4m3PfW/b3be+nMmdt3k16fnlSe95rfPPfmtlWRdWvvcyK4LAAAAwLwyZyeLpV28/c67ba1Rtas6XU493Hx4/5DknAPP+8U/fOptf3Pdy1Zud86mFStX7vK7y67TkkuTPDfJv4z4mgAAAADMI2Eni+Xbtzz357fcWqODf/61bKq6bGVrPx3p1VtrSY5N1RuTPHTHTb/7/Wz6XZJ8r5IPprXLR3o9AAAAAOadsJPF8sY1l//2I/dc/63zvrD2dtfJFEPZd7n8kvYX3/zY5mr5z7S2cV6q6ELN98zLuQEAAABYUObsZLF8LMmX3nbCC7e/zVk/TiYNU19z6W/aW058Se1x6cbLV6S9YlEqBAAAAGBZqdZGPRXieKuq1Uk2JFnT5qu34bai6jrpQs87f+mGt77svw48aMcrVm6X2//qR/mT/zu5bbd50yXbbb7yj/oFhQAAAAAYA/OZrwk7Z0nYOWJVOyR5ZEuekuQuldRlK7c7d7vNm16/sm1+c1o7a7FLBAAAAGB0hJ1LiLBzHlWtSLIyrf1usUsBAAAAYH7MZ75mgSKWjtY2J9m82GUAAAAAsDxZoAgAAAAAGAvCTgAAAABgLAg7AQAAAICxIOwEAAAAAMaCsBMAAAAAGAvCTgAAAABgLAg7AQAAAICxIOwEAAAAAMaCsBMAAAAAGAvCTgAAAABgLAg7AQAAAICxIOwEAAAAAMaCsBMAAAAAGAv
CTgAAAABgLAg7AQAAAICxIOwEAAAAAMaCsBMAAAAAGAvCTgAAAABgLAg7AQAAAICxIOwEAAAAAMaCsBMAAAAAGAvCTgAAAABgLAg7AQAAAICxIOwEAAAAAMbCqsUugCWk6jpJ7pFkxySnJ/lyWtu8uEUBAAAAwMwIO0mq1iZ5cZJHJNl+YM9PUvWaJG9Ma20xSgMAAACAmRJ2buuqbpnk5CRXXFkrnnfUg5952g+ud8Buf/atj+/6uK995J4r0l6f5PapOlLgCQAAAMBSVvKr2amq1Uk2JFnTWtu42PXMSdWqJD9McslD/vLVr/rOvjd7SZL9B1qc8S8fefmHHvKDz/1NkieltTctSp0AAAAAjI35zNcsULRte3CSm7zgvkce9519b/b2JPtN2r/f3z7k75+6fvd9v5jk71JVC18iAAAAAMyMsHPb9pct+fJxd3zw0f3zyWFmJcnz7v/kmya5eZK7LGBtAAAAADArws5t236/2H3f89INXZ+u12Z9d5+bXq//9/7TtAEAAACARSfs3LZdtmnFiutsrdGul18y8c9L57ccAAAAABiesHPb9j9rLzzrdgNh5pQe8oPPZVPVFUn+d2HKAgAAAIDZE3Zu296yom3e7qgvvPs3SdpUDa7z2wvb4772kU3Vcnxau2CB6wMAAACAGRN2bstaO7OSFzzxqx/c7ZmfPa5WX3bxNQLP2/7qR+3d735O7XbFJRtXpD1/scoEAAAAgJmo1qbs0Mc0qmp1kg1J1rTWNi52PXNWVUmevTn1ostXbbfi5BvfqS7eYefc4tzTcutzfpbfbL/zmbtdccm909qPF7tUAAAAAJa/+czXhJ2zNHZh54SqfTfVir/esOOuf7KpVuxy5cqVv9rzkg2v2mHTlR9La5sWuzwAAAAAxoOwcwkZ27ATAAAAABbAfOZr5uwEAAAAAMaCsBMAAAAAGAvCTgAAAABgLAg7AQAAAICxIOwEAAAAAMaCsBMAAAAAGAvCTgAAAABgLAg7AQAAAICxIOwEAAAAAMaCsBMAAAAAGAvCTgAAAABgLAg7AQAAAICxIOwEAAAAAMaCsBMAAAAAGAvCTgAAAABgLAg7AQAAAICxIOwEAAAAAMaCsBMAAAAAGAvCTgAAAABgLAg7AQAAAICxsGqxC1jGdquqxa4BAAAAAJab3ebrxMLO2Zv4ZpyxqFUAAAAAwPK2W5KNozxhtdZGeb6xV113zusn+c1i1zJGdksXHu8fX1dYStybsDS5N2Fpcm/C0uO+hKVp4t48MMmP24jDST07Z6n/Bpy52HWMk4HpAH7TWhtpmg8Mz70JS5N7E5Ym9yYsPe5LWJoG7s2zRh10JhYoAgAAAADGhLATAAAAABgLwk6WgsuTvLD/CCwd7k1YmtybsDS5N2HpcV/C0jSv96YFigAAAACAsaBnJwAAAAAwFoSdAAAAAMBYEHYCAAAAAGNB2AkAAAAAjAVhJwuuqvatqnVVdXJV/aaqWlUdPIvj/7E/ZvLjsvmrGsbfXO/N/hz7VdX7quqiqtpYVR+uqhvPT8Ww7aiq3avqzVV1XlX9tr9P7zDDY98xzfvmD+e7bhgHVbVDVf1zVf2qqi6tqi9X1f1meKz3RZgHw96XfpeE+VVVu1bVC6vqE1V1QX9/HTGL44f+P++gVbM9AEbg5kmeleQnSb6b5O5DnufJSS4eeL5pjnXBtm5O92ZV7Zrk5CRrkvxTkt8l+bskn62q27XWfj3acmHbUFUrkpyU5LZJXpHk/CRPSXJKVd2xtfaTGZzm8iRPmLRtw0gLhfH1jiSHJ3ltuvfII5J8rKoOaa19frqDvC/CvHpHhrgvB/hdEubHdZI8P8kvk3w7ycEzPXBE/+dNIuxkcXw9yV6ttQuq6vAk7x/yPCe01s4fYV2wrZvrvfmUJL+X5C6tta8mSVV9PMn3kjwjyXNGWSxsQw5Pco8kf9paOyFJqup9SX6c5IVJHjODc1zZWvvP+SsRxlNV3SXJo5I8s7X
2yn7bO9O9t7083b05He+LMA/meF9O8LskzI+zkuzbWju7qu6U5KuzOHYU/+dNYhg7i6C19pvW2gUjOFVV1eqqqhGcC7Z5I7g3D0/y1Ylf6Ppz/jDJZ5I8Yq71wTbs8CTnJDlxYkNr7bwk70tyWFXtMJOTVNXKqlo9PyXC2Do8XY+vN09saK1dluStSe5eVTfYyrHeF2H05nJfTvC7JMyD1trlrbWzhzx8JP/nTYSdLG8/TzcE7zdV9Z9VtfdiFwTbqn7IwW2SfG2K3V9JcpOq2m1hq4Kxcfsk32itbZ60/StJdk5ysxmcY+ckG5Ns6OdP+rd+iC2wZbdP8uPW2sZJ27/Sf7zdVAd5X4R5NdR9OYnfJWHpGcX/eZMYxs7ydGGS1yX5Uro5yA5K8tQkd6mqO03xpgfMvz2T7JBu2MJkE9uun+RHC1YRjI99k3xuiu2D99Z3t3D8WemG9X0j3R+6H5hueO1tq+rg1tqVI6wVxs2+2fp721S8L8L8Gfa+TPwuCUvZXP/PexVhJ3PS/9V6+xk2v7y11uZ6zdbasZM2faCqvpLkXel+eVs312vAcrcI9+ZOE+eaYt9lk9rANmvIe3OnzOHeaq09e9Km91TVj5O8NN1woffMsB7YFg17/3lfhPkz9Pui3yVhSZvT/3kHGcbOXP1hkktn+Lj5fBXRWjs+ydlJ7jtf14BlZqHvzUv7j1PNo7LjpDawLRvm3rw0o7+3XvP/27vzIM2q8o7j398MKIWig4IEN4aMFCSiMQaBGFQYVAQrJIjElCCiIIplsUVDiCDEhR01QhmJbAKGJYAoJEC0wiBgxQBhJ2zDDIKA7AQyDLI8+ePchjfvdE/39PQ2zfdT1fX2e++595x7+7117/v0OecBnsf7pjSc0V5/3hel8TOm90W/S0pTxphd2/bs1PK6BfjUCMsONtRgLN1NGzIkaeKvzUdo/4Vbe5B1A8vuHYN6pBXdaK7N+xjja6uqnkryMN43peHcB7xhkOXDXX/eF6XxM9rrcmn8LilNvjF75jXYqeXSZdk6ebLb0WXRmw1cM8lNkaaEib42q+r5JDcAGw2yehPgzqp6YqLaI01Vo7w2rwXek2RG34TtmwCLgNuWtR1dYpQ1gAeXdVvpJeZaYIskr+qby2+TnvVL8L4ojatrGcV1ORS/S0pTxrWM0TOvw9g1pSV5c5IN+patOUjRPYA1gYsmpGHSS9xg1yZwNvCuJBv1lFsfmAv880S2T5pmzgbWAj4ysCDJGsAOwPlV9XTP8jlJ5vS8X2WIjM8HAsH7pjScs4GZwO4DC5K8nNZD+5dVdXe3zPuiNHFGfV36XVKaGpKsnWSDJCv3LB7xM++w+x+DfDHSMktyQPfrW4G/BE4EFgBU1dd7ys0D3ldV6Vm2CDiTloVrMbBZt4/rgD+pqkUTcAjStLSc1+ZqtP+IrwYcBTwD7Et7GH1HVdmDTBqFJDOBy4ENgSOBh2hJFN4MvKuqbu0puxCgqmZ372fTrsvTaUPoAbYCtqF9qftw33/OJfVJchawHW2u2zuATwIbA1tW1c+7MvPwvihNmOW4Lv0uKY2zJF8AZtGyp+8BnMuLPaePqarHk5xMu27XraqF3XYjfuYdtg0GOzUZkgz5weu7Gc1jyRvU94F3A2+iTVR7F3AO8A2HA0nLZ3muzW75G2kPnR+kjR6YB+xTVXeMR3ull4okq9Me+v6clonySuCLVXVVX7mF8P+CnbOAY4BNaQ+cM2lfCn8IHFVVz0xE+6UVWZJVgK8BOwGrA9cDB1bVxT1l5uF9UZowo70u/S4pjb/ueXSdIVavW1ULBwt2dtuO6Jl32DYY7JQkSZIkSZI0HThnpyRJkiRJkqRpwWCnJEmSJEmSpGnBYKckSZIkSZKkacFgpyRJkiRJkqRpwWCnJEmSJEmSpGnBYKckSZIkSZKkacFgpyRJkiRJkqRpwWCnJEmSJEmSpGnBYKckSZIkSZKkacFgpyRJ0hSW5OAkNdntGG9
JVkpyRJK7kzyf5LxueSU5eHJbJ0mSpBWFwU5JkqQJkmSXLng38LM4yb1JLk6yZ5LVJruNA5Ks2gVaNx9h+c27Y/roKKv8NPAl4Gzgk8C3RrmfFV6SzZJcmOTX3WfkV0nOT/LxyW6bJEnSVLfSZDdAkiTpJegrwAJgZeB3gM2BbwP7Jtm2qq7vKft14LCJbiCwKnBQ9/u8CahvLvDrqtpnAuqaspLsAJwJXAv8PfAosC7wXuAzwD9NWuMkSZJWAAY7JUmSJt6FVXVVz/tDk8wFLgB+kuT3quopgKp6Fnh2aTtLMgN4WVUtHrcWj7/XAY9NdiOmgIOBm4FNq+q3vSuSvG6iGpEkwCoDn0NJkqQVhcPYJUmSpoCq+nfga8A6wE4Dywebs7MbLn5skh2T3AQ8DXyoW/eGJCcm+U2Sp5PclOTT/fUlWaXb923dUOn7kpybZE6S2cCDXdGDeobdH7wsxzTQ9iRvSXJykseSPJ7kpCSrdmVmd8e3BfDWnro2H2KfJydZOFRdgyzfKcnVSZ5K8kiSM5K8qa/MvCQ3Jvn9JJckWdQNIf/rZTlvPWVmJNm7O/eLu7/FcUlWH8FpmwNc2R/oBKiqB/raMiPJXklu6Op5MMlFSTbqKbNSkgOTzO8+DwuTHJLk5X37WpjkgiRbJbkKeAr4bLduVpJvd/OpPp3kjiT7dUF2SZKkKcUHFEmSpKnj1O71gyMoO5c2r+WZwF7AwiRrAf8BvB84tlt+B3BCkr0HNkwyk9aL9CDgauCvaEOmXw1sSAt07tEV/xHwie7n3FEe11nAasD+3e+78OIQ+Qe7fd8C3NNT13+Psq4XJPkycApwO7AvbaqALYGfJ5nVV3x14CLgOtr5uAU4PMnWPfsb7rwNOA44EriC9jc4CdgRuDjJysM0+y5gyyRvHMEhntAd093AfrTpDhYDm/aUOR74KvBfwD7ApbS/wxmD7G994HTgp127r+2C0pfSAvCnAHt2x3Uo8M0RtFGSJGlCOYxdkiRpiqiqe5I8TuvdN5z1gbdV1c0DC5IcD8zslj/cLf5ektOBg5Mc1w1L3pkW9Nu3qnoTAR2WJFVVSc4G/gG4vqpOW85Du6aqdu1p52uBXYH9qup/gdOS7AY8NwZ1DdSxDvB3wAFVdUjP8nOBa4DPA4f0bPJ6YOeqOrUrdwIt8LgrcGFXZqnnrdtuM2A3YMeqemF+zSSX0IKpO7D0eTcPpwUx5ye5Argc+DfgF1X1fM/+tqAFjb9TVXv1bH90T1v+gJbs6fiq+ky3/rtJHgC+mGSLqrqkZ9u3AB+qqot76jmA9nn8w6q6vVt8XJJ7gS8lObqq7l7K8UiSJE0oe3ZKkiRNLU/SekEO59K+QGeA7YHzu7drDPwAF9N6H76zK7498BBwTP9Oq2qJoeBj4Ht97y8DXpvkVeNQ14CP0J51z+o7F/fTenpu0Vf+SeCFQGs3jPw/gd/tKTOS87YD8Djw0756r+7q6K+3fz8n0qYkmAdsBhxIO1+3J3l3X1uKFtAdqi3bdK/9PTCP7l4/3Ld8QW+gs+d4LgMe7Tuen9EC6+9d2vFIkiRNNHt2SpIkTS2vBB4YtlTL5t5rTWAWsHv3M5iBBDdzgFu75EcT4Vd97x/tXlcH/mec6lwPCC2wOZhn+t7fM0ig91Hg7T3vR3Le1qMFlof6Gw6bZKgLOF7cDSH/I+BjwOeAC5Js0M3dOQe4t6oeWcqu1gGep01l0Lv/+5M81q3v1f+ZgnY8b+fFOVz7TVjSJEmSpJEw2ClJkjRFdPM0vpq+4NQQ+rNkD4zYOQ34wRDbXD/Kpi2v54ZYnlHsa6iepzP73s/oym49RP1P9r0fqzbOoAU6dxxi/VBBwyVU1SJar8rLkjxEmyt0a4b++w65qxGWGyzz+gzaHJ5HDLHNbcvYFkmSpHFlsFOSJGnq+ET32j+UeCQeBJ4AZlbVz4YpOx/YJMnKVdXfw3HAeAxnHwuP0nqw9uvvpTifFqhcUFVjFZA
byXmbT0sQdUU3P+pYuap7Xbunnq2SvGYpvTvvogUr16Mn4VOXyGpWt34484FXjuAzJUmSNCU4Z6ckSdIUkGQubX7GBcAPl3X7qnoOOAfYPsmG/euTrNnz9hxgDeALg5Qb6Mm4qHudtaxtGWfzgVcneWF4eZK1ge36yp1L6615UM8xDZRPlyRpWY3kvJ1F62V64CBlVhokC3x/mS2HWDUw/+atPW0JL2a1H6wt/9q97t1XZN/u9V+W1pbOWcAfJ9lqkHpmJbHzhCRJmlJ8OJEkSZp4WyfZgPYsthYwF/gArafdtlW1eJT7/RtaApxfJvk+cDPwGlpiovd3vwOcQsss/s0kG9OGSr+iK/Nd4MdV9VSSm4GPJbkNeAS4sapuHGXbxsoZtIzlP0ryHWBVYA/acOqBBExU1fwuk/ihwOwk59F6vq5LC4z+I3DUMtY9kvN2aZLjgP2TvIOWSf0ZWu/KHYC9gLOXUsePkyygJZqa37P/PwWu7JZTVZckORXYM8l6tEzvM4D3AJcAx1bVdUl+AOzeBVkvBTamZWg/ry8T+1COBLalzRd6Mi3R0iuAtwEfBWbTkjZJkiRNCQY7JUmSJt5Xu9ff0oKIN9B6351UVU+MdqdV9ZsuCPcVWjbyzwMPAzcB+/WUey7JNsCXgY/TMns/DFzetWXAbrTM498CXkbL/D2pwc6qejjJdrQM40fQesLuTwsmvrOv7GFdoHYfXuwBeTctAPmTUdQ9ovNWVZ9LcjXwWeAQ4FlgIW0+1SuGqWY34M+AvwBeT+u9eSfwDeDwvuRIn6LNw7orLSj5OG24+y/69ncnsAstyHs/LQC8RBb3IY55UZL3AX9LC9buTEsqdRvtnD4+kv1IkiRNlCyZdFKSJEmSJEmSVjzO2SlJkiRJkiRpWjDYKUmSJEmSJGlaMNgpSZIkSZIkaVow2ClJkiRJkiRpWjDYKUmSJEmSJGlaMNgpSZIkSZIkaVow2ClJkiRJkiRpWjDYKUmSJEmSJGlaMNgpSZIkSZIkaVow2ClJkiRJkiRpWjDYKUmSJEmSJGlaMNgpSZIkSZIkaVr4P4/QeO9kr3jGAAAAAElFTkSuQmCC", + "text/plain": [ + "
" + ] + }, + "metadata": { + "needs_background": "light" + }, + "output_type": "display_data" + } + ], + "source": [ + "plt.scatter(mean_train_influences, mean_nystroem_train_influences)\n", + "plt.scatter(\n", + " mean_train_influences[:num_corrupted_idxs],\n", + " mean_nystroem_train_influences[:num_corrupted_idxs],\n", + " facecolors=\"none\",\n", + " edgecolors=\"r\",\n", + " s=60,\n", + ")\n", + "plt.xlabel(\"Direct Influence Score\")\n", + "plt.ylabel(\"Nystroem Influence Score\")\n", + "plt.title(\"Influence of training points - Nystroem vs direct method\")\n", + "plt.show()" + ] + }, + { + "cell_type": "code", + "execution_count": 35, + "id": "de6f36f3-97bf-49a4-9a3a-84a60bd5228d", + "metadata": { + "editable": true, + "slideshow": { + "slide_type": "" + }, + "tags": [ + "hide-input" + ] + }, + "outputs": [ + { + "name": "stdout", + "output_type": "stream", + "text": [ + "Pearson Correlation Nyström vs direct 0.9977239930897606\n", + "Spearman Correlation Nyström vs direct 0.992110235030355\n" + ] + } + ], + "source": [ + "print(\n", + " f\"Pearson Correlation Nyström vs direct\",\n", + " pearsonr(mean_nystroem_train_influences, mean_train_influences).statistic,\n", + ")\n", + "print(\n", + " f\"Spearman Correlation Nyström vs direct\",\n", + " spearmanr(mean_nystroem_train_influences, mean_train_influences).statistic,\n", + ")" + ] + }, + { + "cell_type": "markdown", + "id": "97e83f8f", + "metadata": { + "editable": true, + "slideshow": { + "slide_type": "" + }, + "tags": [] + }, + "source": [ + "### EKFAC" ] }, { @@ -812,14 +1375,20 @@ "id": "fad8517d", "metadata": {}, "source": [ - "The EK-FAC method is a more recent technique that leverages the Kronecker product structure of the Hessian matrix to reduce the memory requirements. It is generally much faster than iterative methods like conjugate gradient and Arnoldi and it allows for an easier handling of memory. Therefore, it is the only technique that can scale to very large models (e.g. 
billions of parameters). Its accuracy is however much worse. Let's see how it performs on our example." + "The EKFAC method is a more recent technique that leverages the Kronecker product structure of the Hessian matrix to reduce the memory requirements. It is generally much faster than iterative methods like conjugate gradient and Arnoldi and it allows for an easier handling of memory. Therefore, it is the only technique that can scale to very large models (e.g. billions of parameters). Its accuracy is however much worse. Let's see how it performs on our example." ] }, { "cell_type": "code", - "execution_count": 24, + "execution_count": 36, "id": "84994bf4", - "metadata": {}, + "metadata": { + "editable": true, + "slideshow": { + "slide_type": "" + }, + "tags": [] + }, "outputs": [], "source": [ "ekfac_influence_model = EkfacInfluence(\n", @@ -836,9 +1405,13 @@ }, { "cell_type": "code", - "execution_count": 25, + "execution_count": 37, "id": "8479274e", "metadata": { + "editable": true, + "slideshow": { + "slide_type": "" + }, "tags": [ "hide-input" ] @@ -848,7 +1421,7 @@ "name": "stdout", "output_type": "stream", "text": [ - "Percentage error of EK-FAC over direct method:1927.0627975463867 %\n" + "Percentage error of EK-FAC over direct method:1093.1286811828613 %\n" ] } ], @@ -868,9 +1441,13 @@ }, { "cell_type": "code", - "execution_count": 26, + "execution_count": 38, "id": "03927fb8", "metadata": { + "editable": true, + "slideshow": { + "slide_type": "" + }, "tags": [ "hide-input" ] @@ -878,7 +1455,7 @@ "outputs": [ { "data": { - "image/png": 
"iVBORw0KGgoAAAANSUhEUgAABS4AAALGCAYAAABLZg+zAAAAOXRFWHRTb2Z0d2FyZQBNYXRwbG90bGliIHZlcnNpb24zLjcuMSwgaHR0cHM6Ly9tYXRwbG90bGliLm9yZy/bCgiHAAAACXBIWXMAAA9hAAAPYQGoP6dpAAB6yklEQVR4nOzdeZhkZ1k3/u89M0kgIQlLIGSDCTuK7AioEYKC6IgIBsT8lO1VEAQSUDSgvIAijuIriQoKIgSBIBAji6OAQgLDIoussm8NmSQEQiAJZJ95fn+c06HS6Z6u7q7qrur+fK6rrpo65znn3N1dVT317Wep1loAAAAAACbJprUuAAAAAABgLsElAAAAADBxBJcAAAAAwMQRXAIAAAAAE0dwCQAAAABMHMElAAAAADBxBJcAAAAAwMQRXAIAAAAAE0dwCQAAAABMHMElAOtGVR1UVX9dVTNVdXVVtaq6a1Xdv//389a6xvWmqh5UVR+oqu/13+M3r3VNc1XVWVXVRnCe5/Vf4/1XXtX0qKqt/dd96lrXAoOq6tT+ubl1YJvn6yqZhN+t/fXPWqvrAzB+gksA1lT/oWPFoVLvL5I8Ncmnk/xZkucn+eaIzs0cfVjwliRHJ3lluu/3Py9yzGP7n/ljx14gE6f/o8LMWtcx10AovbfbWXOOmZkbmg3su2lVfaTf/+qq2rKXa5+6lOv2x/zhwP7bD/H13aSqntP/keGCqrqqqr5TVTur6tlVdehQ3yiWZVrf94TAAEyCBf8TBQBT6BeTfLG19pDBjVV1hzWqZ7372STXS/K7rbXT1rqYvXh0kv1HcJ6/TRfMfmME55om5yS5Y5KL1rqQVfCeJGctsG9mmBNU1dFJ3pnkNun+mHJSa22YP868JcknFrtuVVWS30zSklSS30rye3up5xeTvDbJwUm+nORfk3yrf3zvJC9I8uyquk1rbT38oWcjPV8BYN0TXAKwnhye5L1rXcQGcnh/f+6aVrGI1tpIgsbW2gVJLhjFuaZJa+2qJJ9f6zpWyVmttect9+CqumuS/0hyaJKnt9ZOXsLhb26tnTpEuwcl2Zrk1CQPTvKYqnp2a+3Keeq5X7qg8uokj0vy6rkhalX9WJJT0v0RYuptsOcrAKx7hooDMHEGh6f1//7nfnjj5VX10b4H0WD72TkMK8n99jbEcs5xCw5b3dt8hlV1h762s6vqyqo6v6pOm2/I5uAcbFX1xKr6dP91nF9VL6+qgxe4/pHVzdf5paq6rKourKoPV9VzFmj7t1X11aq6oh8C+taqutfevv4FrvvIqnpvVV3UX/fTVfWsqtpvoM39++/38/tNZw58z6/z/Ro47qwkr+ofvmrOcNitfZtrvu9VdXxVfaiqvj/4c+qHXf5L//VeVlUXV9X7q+rXF7puzZmOoAbmZqtuHtQd1c3TeWlVvaeqfmKe88z7nJh9rlXVIf3P9Lz+5/CZqnrcAjXt159v9mf2tap6Qb996Dnb5rxW7lBVb+6fKz+oqvdV1YP2cv2T+p/vpf33cGdVPXJv15izfejn9sBz5pZJbjnnZ3/qQLtjquptVbWr/758s6r+u6qeO8z3Yy1V1bHpemzeOMn/t8TQcil+q7//hySvS3JIkofNU8+mJC9L11HhhNbaqfP1/GytfTpd7+lz9nbRqvr7/uf10AX237vff/rAtkOr6i+r6gv9c/J7/b9PrapbDfn1pqp+tn9+/qB/fr+5FuhJP8Tz9VZV9dSq+lT//nHWQJsbV9WfVdXn+n0XVdW7Fnod9cf8at/mwv75P1NVr6+qe/b7z8oi73t7Offg6/vWVXV6de/vl1TVO6vqTn27mw6891xe3TQFxy5wzi1V9eT+dXVx//r/eFU9pX/OzLZ7XpKv9Q8fM6fux85z3qHeR/u2B/ff5y/09X63qt5RVT+7QPt9q5vq4Cs15/1yb98/ANYHPS4BmGS3TPLhJF9N8pp0gcCvJnlLVf1sa+3Mvt2p6YZ3PjfJ1/v
HyZBDO5eiqh6c5Iwk+yR5W7qhl0cmeXiSbVV1bGvtY/Mc+hdJfq4/5p1Jjk0XQNwmyQPmXOOeSd6R7ut9b3+9/ZP8SJLnJfmTgbZ378934/6YM9IFGb+c5H1V9bDW2r8P+bW9MMmz0vUqPC3J95P8fJIXJvm5qnpQ36trJl1oef8k90vy6vzwez2ThZ2a5HtJHprrDov93py2v5vkgem+X2emG9Y66++SfCbd9+a8JDdJ8gtJXlNVt2+tXSfc3Yt7Jvn9JB9M8ookt0jyK0neVVV3ba19Ycjz3DDJ+5NcmeT0JPsleUSSV1bVntbaq2cbVlUl+Zck25J8Kd0Q9H2SPDbJjy6h9kFH91/Dp9OFVYele638R1Ud31p7w8D19033XLlfup5pL0n3/DouyRv6r/vZS7j2MM/tmXTPmRP7xycPHP+Jvq4HJ9mR5OIkb00XpN043bDfJ+eHQfnEqapHpHuPuirJttbaf43pOocm+aV0U2J8oKouTvdaeUKSN8xpfr8kt0/3ffzHvZ23tbYnyZ5FLv/qJE9MN/XCW+bZ/5j+/tS+1v3TvSZuneQ/0z0/Kt37+kPTvU6+usg1U1XHpfvaruzvz0vyU+me759a7Ph5nJLkmHTPtX9Psru/zi3T/R7ZmmRnkrcnOSDdFCRvr6onttb+YaCuShdIPibde+YZSb6d7vfBsUm+kOSjWdr73kK2JvlQks/159uaLqw+q6ru29d6cbrvz42TPCrda/92gz3Oq2r299bP9fWdluTyvt6/STd1wG/0zc9K9752QpJPJnnzQD2DX0OyhPfRqrphuufFjyT5SLr3gkOSPDLJO6vqSa21lw20ryRvTPf9+0q698t9kzw+yY8t8n0DYD1orbm5ubm5ua3ZLd08bW3Otq2z25M8d86+n+u3//sC5zprnu337/c9b872mSQzC9T1vP6Y+w9su1GS76b7kPojc9rfKV3Q97E520/tz/ONJLcY2L4lXfDWkvz4wPZ90/VyaUmOn6euI+ec48vpPnjeb067w9MFFucl2W+In8N9B+q8+ZxrvK3f9+zFvkdDXOex/TGPXeT7/oMkd1ugza3n2bZvknelC46OmLPvrHmeY7PPievUki6caUleOszXO3CeVyTZPLD9R9IN0f3snPa/0bd/b5J9B7bfMF2QOO/zeIHvxdaB679ozr579t+P7yY5aGD7s/r2/55ky8D2m6V7TbQkPzHPNU5dyXN7iNfcv/TH3GWefYcM+xxbzm3gZ3tW/+/5bveZ52tp6YLb3UnOT3KPZVx79vv45gWue8OBtif1bZ81sO2j6ULH28w573P6tq8d4ffpC0muSHLjOdv3S3Jh/z3Y0m97SH/9F89znn2THDjE9W6Q5Dv98/iec/a9eOC5v3UJz9dzkhw9z7XO6r+Pj5qz/YbpgrrLkhw6sP0J/fk+nOTgOcdsTnLYwOPHZi/ve3v5+me/lpbkDxf4+V6Y5O+TbBrYN/se8+I5xzyv3/43ufZ71eZ04XZL8tDFvpcD++8/UN9j5+xb6H30Zf32lyWpge23TTcv6RVzfp7H9+0/mOR6A9tvnC7IHPr90s3Nzc1tOm+GigMwyb6ebuGIa7TW3pEuKPnxNajn0ek+xD63tfbZOXX9b7qhm3erqh+Z59g/bgM9X1prV+eHwwcHv5aHpPuw+NY2z4I3rbVdAw+3pevN9DettffMaXduukDl5kl+Zoiv7fH9/QvawAIdfZ2/m+4D/W8OcZ5ReXlr7ePz7WitfWWebVem6zm4JcN9vbPe3647r+Ar0wWOS3mOXZrkGa213QM1fTZdz6I7VtUNBto+pr//ozYwL2Fr7XsZ6E27RBcl+ePBDa21j6YbSnzDXHso8ePTfdh/Rv/znW3/rYHrL+VnPexze1iXzd3QuvlFV8P90vXcnu92nwWOeWa66ZeOa639zwqu/dAFrnvD5FqL8uxJ8k8Dx52aHy7SM+iw/n5XRufV6ULHX5uz/SHp/rDzusH
nVG++n+eVrbVLhrjeQ9MFVKf1z+dBz8vyFuD5i9ba1wY3VNVd0v3s/6W19s9zav1eup/D9dL1Ipz11P7+ia21i+Ycs7u1dt4yalvITJLtc7bN9uLeL8kzW9drdtZp6d7D7jq7oR8G/tQk30w3/+rge9XudO/zLcn/t4z6hnof7Xt7/3q6P/I9q7XWBmr4UpK/Tvf8evTAeWan23h2a+3ygfYXZvnvlwBMEUPFAZhknxj8cDXg7HQ9BFfb7DXv0s//Ndft+vs7JvnsnH1zP3Qn3deRdB/4Z82GI/+xhHpuuUA9tx2oZ7Hh4nfv7989d0dr7YtVtSvJ0VV18NwP6WPy4YV2VNUtkvxBuoDyFkmuP6fJEUu4znV+Lq21q6rq/Fz757KYL7XWLp5n++DP+Pv9v++WLnz6wDzt37eEaw762AJB0FnpgtK7JXl1VR2Ybgj3Oa21+RYwmf35320J1x72ub2Y16WbcuFDVfWGdFMEvH9OWL+gfgjqifPsOrkPn4bx/Lb0xXneka4n+Cur6gGttbMHd1bVienDxwFvbq19Ys62x80T/gx6QLo/VLyjtTY4H+VpSf5fksdW1R+1bnGacfmndGHRY9L9oWDWbBh/6sC296Tr3XhSP6XFv6cL8hd6X5/P7PvSe+buaK1dVFWfSBc4LsV87y2z76UHL/BeetP+/o5JUlUHpOtlf/5Cf2AZsfm+Z7OLon1x7mu/tba7fw87cmDz7dKFwF9K8kddDn4dl6X/Gpdo2PfR26ebluL9ffA417uT/FGu/f5z93Tvl/O9N561jFoBmDKCSwAm2fcW2H511maBuZv093N7Ns11g3m2fW+ebbM9kzYPbLthf7/XhTLm1POIZdQz18H9/UK9hM5LFxLeMMvr5bRU35xvY3ULenw43YfhnenmVLwo3VDdrekClKUs2PC9BbZfnWv/XFZynsw518FJLpynZ1rSDbVdjoWOm/0+Hjznfm8/5+S6QdvefG+ebfN93XvVWjujuoW3fjddr9AnJklV/U+63ln/ucgpbpiuZ9xcpy5Q46g8KV2vyycl2dmHl4NzN56Ybl7HQTO57jyBi3lCf3/q4MbW2oVV9bZ0vQEfmm7uyOSHP8ulBPl71VrbVVXvSvLAqrpja+1zVXWzdKubf6K19qmBthdX1X3SzU36S+nC3SS5oKpemq5392Ih6+zzdbHn91LMd8zse+kD+9tCZt9Lb9jfD/M+PQrXec9trV3dh48LvR9fnW7u3FmzX+NtM//rZNYwvy/m+t5eapj73pcs7f1n9v1yvufKcn7+AEwZwSUAG9medMPS5nPDebbNfkC8y+AH9BH7Xn8/TNgwW89DW2tvXeF1Z89183Tzhs112Jx249YW2P6MdB/Ar9M7rap+LT/s+TXJLk5y46raMk94eegyz7nQcTfv7y+ac3/zedomq/9zvpbW2o4kO/oebfdOtzDKk5L8W1Xdbe4UDXOOnUk3ZHq1tdbak6vqsnTPz/dW1c+0fkGS1trWlV6gqm6absGtJHl9Vb1+gaZPyA+Dy9keavevqs1L6OW4mFenC/cek27Ozf8v3WeKV89t2PeW/T/9MPcfSddr9HeS/N90f3xabCGt2efhYs/vpZjvvWX2Oie01v56iHN8r78fWSi8Cma/xn9trT18jWtYyvvPReneL/eZJ7xczs8fgCljjksANrLvJjm0X2l1rnvOs+2/+/tjxlfSNdf4+SW0HUU9s8Md7z93R1XdJt2Qw68tYcjtQmbDk6X0Zhx0m/7+X+bZt9Qho2vl4+n+D/YT8+z7qWWe8+79MPC57j9wzfRDSr+S5Iiquu087Y/t7z+2zDoWsztD/Oxbaz9orb27tfaMdKva75vhXhNrprX2u0n+NF2Y9Z6qGuWKx49J9z34n3SLqMx3+3aSn62qo/tj3pNuMZ0j88N5AudVVZsWeB+czxnpwvdf7+dNfEy6nnXXmZN3Vut8prX2N/lhj8ZfHuJas8/D67y2q+rgDMz
huEJLei9trf0gyf+m+/0xzLQKK33fG4XPpwtc77OEn/Wo6/5CuvmA79JP7TDXfO8/H0v3fjnfe+P9R1QXABNMcAnARvbhdD2FrvWhvqoem+Qn52n/qnQf/J5bVddZdKT/8H//Fdb0tnTDSH+p70E49xqDc5a9JV0I9TtV9Qvznayq7ltV+w9x3Vf293/U9+6aPX5zkr9M93+GfxzqK9i77/T3t1jm8TP9/f0HN1bVz2V1Fw9aidmFVV7QL1aR5JogZrEeaAs5OF0vtmtU1T3T9Ya7KMm/Dux6ZbqeiS/qf76z7Q8ZuP4rMx7fSXLTqpo7L2mq6qerar7RQLO97S4dU00j01r7oyR/mK7mM/u5HUdhdnqKJ7fWfnO+W/pVmtO/DvrFWp6YLlT866r69ZpnYsN+MbF3Zsjeg621y5K8sW//9CR3SfLv/eJOg+f90aqar6fkUn6eb0n3B6bj++fzoOflh0OPV6Rf+GdnkodX1ePna1NVP9YPi5812zPzZf1rd7Dtpqo6bGDTSt/3Vqzv3f036Xo1/vUCr8HD5iwu9910PVRHUne/GNnrkhyYOQvrVNWtkzwt3QryrxnYNbvQ159W1fUG2t843XyYAKxzhooDsJH9TbrQ8u+q6mfSLShy13QLNfxbumGq12itfaeqjksXAv13P9fbZ9J9sDuqP+4m6VafXZbW2pVV9Yh0QcJpVfXEdL2Brpdu0YSfSf/7u1/84OHpFgfZUVUfSDdv3qV9PfdKcqt0H1T3GhK01j5QVX+R5PeT/G9VnZ7kB+l6ud0p3bDTFy336xrwwb6WE6vqJvnhHGV/M+SiPy9N9zN7U1/juX19D04XpvzqCGoct39K8qh0Nf9vVb013Vx0v5LkI+kWsNiz8OHzem+S36yqe6dbAOWwdN+LTelWPR5cOOgv0/1cH5rkk1X17+kWzHhEkpulW3V5uYsELeZd6Z6Xb6+q9ya5IsknW2tvSxcEHVFV708XUF+Z5B7phhd/Pck/z3vG0br/AouzJMn3WmsnL3aC1toLq+rSJC9O8u6qenBr7b8XO24h/R9Dbpfk0621BRetSveHhT9M8riqem5r7erW2nv694jX9LfnVNVZ6XpnHpyuZ/m9073Wr7P69168Ol1A+mcDj+d6YLpw/INJvpjkW+l6fz403fN70feT1tr3q+oJSd6Qbv7QN6SbB/Gn0r3u35vkp5dQ994cn25xmH+sqqcl+VC6P1QdmeTO/fXu238dSfKKdD00fyPJl6rqLem+r4ene86+Ml24mqz8fW9U/iRd0PzbSR5SVe9ON0/nzdLNffmT6Z5Dn02u+f5/KMkxVfW6dD/H3UneuoLpUk5K9317SlXdK90iXIckeWS6QPMpc1Z9f32697JfSvd++ZZ075fHpXu/vPUy6wBgSgguAdiwWmufraqfTTcU9SHpeibtTPfh9OGZE1z2x7yrqu6c5PfSLTZxTLqA5dx0H3rnG8K81Lo+WlV3TfcB7+fTDSm+JMmXM6dXXWvtU1V1l3Rz6/1iulBvT7oP9x9PtwjDBUNe9w+q6uNJnpLk0ek+HH4lXa+W/9f3llnp1/bdqvqVvq7HJjmg3/XaDDGvYv/1HpvkBUm2pfu/zCfT/by+lykILltrraoeluTZ6UKPp6b7eb06XTD7y+mG4i7F19KFEdv7+/3SDbH849baO+Zc/8qqemC658zx/fWvTvd9PLG1ttD8iaPwgnTzxz4kXUiyOd3X/bZ0r8OHpQvTfjbd8/gb/faTW2vfHWNds+6Xhacc+HqSk4c5SWvt5H7Oy79L8p9V9YutteusjD2k2d6Wr1jkmjNV9V/pAsOHpO9l21p7W9+b7cnp3k+OS3JQuveUz6d7T3n53B6Ti1zrfVX15XRTN1yY7g89c70jXU+9n04XVh6U7nn+n0n+qrX2gSGvdXpVPTjde8Yj04Xd7033Pn1SRhRc9gsP3SPd6+FX0vVW3pwuZPxsuj90fXqgfUvy6Kp6R7q5RR+
Z7nV3XrrfI28daLui971R6f/Y9ctJfr2v4xfTLcbz7XTvIc9J1yNy0G+kC+EfnOTX0vXq3ZVkWcFlv5jUfZM8K9379jPSheYfTvKi1to757Rv/R/zTuprfkq67/GrkvxxksuXUwcA06O637kAAKy1PlB8Z5LtrbVnDdF+a7rA4dWttceOtzoAAFhd5rgEAFhlVXX4PNtukq7HZHLtOSkBAGBDMlQcAGD1/VU/xP8D6YZpHpluGO+Nk7xskbkMAQBgQxBcAgCsvjPSra78kHRzPl6ebqGnf8xoVm8HAICpN9VzXFbVEUn+PF0Phf3TLVrwuNbaR9e0MAAAAABgRaa2x2VV3SjJ+5OcmS64/HaS2yZZjRUnAQAAAIAxmtoel1W1PclPttaOWetaAAAAAIDRmubg8rNJ3pFuMvv7JTknyUtba/+wl2P2S7LfnM03TnLhuOoEAAAAgHXuwCTnthEHjdMcXF7e//Ovkrwpyb2SnJLkt1trr17gmOclee6qFAgAAAAAG8eRrbVzRnnCaQ4ur0zy0dbaTwxs++sk92qt3XeBY+b2uDwwya50vTYvGWO5AAAAALAezeZrB7fWLh7liad2cZ4k5yX57Jxtn0vyKwsd0Fq7IskVs4+ravafl4z6GwsAAAAA691AvjZym8Z25vF7f5Lbz9l2uyRfX4NaAAAAAIARmubg8sVJ7lNVz66q21TV8UmekOQla1wXAAAAALBCUxtcttY+kuRhSX4tyf8meU6SE1trr1vTwgAAAACAFZvaxXlGoaoOSnJRxjB5KAAAAACsd+PM16a2xyUAAAAAsH4JLgEAAACAiSO4BAAAAAAmjuASAAAAAJg4gksAAAAAYOIILgEAAACAiSO4BAAAAAAmjuASAAAAAJg4gksAAAAAYOIILgEAAACAiSO4BAAAAAAmjuASAAAAAJg4gksAAAAAYOIILgEAAACAiSO4BAAAAAAmjuASAAAAAJg4W9a6AAAAAABYb7aetGNzkmOSHJbkvCQ7Z7Zv2722VU0XPS4BAAAAYIS2nrTj4UlmkpyZ5LT+fqbfzpAElwAAAAAwIn04eXqSI+bsOiLJ6cLL4QkuAQAAAGAE+uHhp/QPa87u2ccn9+1YhOASAAAAAEbjmCRH5rqh5axKclTfjkUILgEAAABgNA4bcbsNTXAJAAAAAKNx3ojbbWiCSwAAAAAYjZ1JdiVpC+xvSc7u27EIwSUAAAAAjMDM9m27k5zQP5wbXs4+PrFvxyIElwAAAAAwIjPbt52R5Lgk58zZtSvJcf1+hlCtLdRzdf2rqoOSXJTk4NbaxWtdDwAAAADrw9aTdmxOt3r4YenmtNy5HntajjNfE1wKLgEAAABgWcaZrxkqDgAAAABMHMElAAAAADBxBJcAAAAAwMQRXAIAAAAAE0dwCQAAAABMHMElAAAAADBxBJcAAAAAwMQRXAIAAAAAE0dwCQAAAABMHMElAAAAADBxBJcAAAAAwMQRXAIAAAAAE0dwCQAAAABMHMElAAAAADBxBJcAAAAAwMQRXAIAAAAAE0dwCQAAAABMHMElAAAAADBxBJcAAAAAwMQRXAIAAAAAE0dwCQAAAABMHMElAAAAADBxBJcAAAAAwMTZstYFAAAAALB0W0/asTnJMUkOS3Jekp0z27ftXtuqYHT0uAQAAACYMltP2vHwJDNJzkxyWn8/02+HdUFwCQAAADBF+nDy9CRHzNl1RJLThZesF4JLAAAAgCnRDw8/pX9Yc3bPPj65bwdTTXAJAAAAMD2OSXJkrhtazqokR/XtYKoJLgEAAACmx2EjbgcTS3AJAAAAMD3OG3E7mFiCSwAAAIDpsTPJriRtgf0tydl9O5hqgksAAACAKTGzfdvuJCf0D+eGl7OPT+zbwVSr1hYK6Ne/qjooyUVJDm6tXbzW9QAAAAAMY+tJOx6ebnXxIwc2n50utDxjbapaH/oV2Y9JN0/oeUl2CoIXNs58TXApuAQAAACmkIBt9BYIhHc
lOUEgPD/B5ZgILgEAAABIrgktT+8f1sCu2fDsOOHldY0zXzPHJQAAAAAbWt979ZT+Yc3ZPfv45L4dq0RwCQAAAMBGd0y64eFzQ8tZleSovh2rRHAJAAAAwEZ32IjbMQKCSwAAAAA2uvNG3I4REFwCAAAAsNHtTLd6+EKrWLckZ/ftWCWCSwAAAAA2tJnt23YnOaF/ODe8nH18Yt+OVSK4BAAAAGDDm9m+7YwkxyU5Z86uXUmO6/eziqq1hXrArn9VdVCSi5Ic3Fq7eK3rAQAAAGBtbT1px+Z0q4cflm5Oy516Wi5snPma4FJwCQAAAADLMs58zVBxAAAAAGDiCC4BAAAAgImzboLLqjqpqlpVnbzWtQAAAAAAK7MugsuquleSJyb51FrXAgAAAACs3NQHl1V1gySvS/JbSb67xuUAAAAAACMw9cFlkpck2dFa+6/FGlbVflV10OwtyYHjLw8AAAAAWKota13ASlTVo5LcPcm9hjzkWUmeO76KAAAAAIBRmNoel1V1VJJTkvx/rbXLhzzsz5IcPHA7ckzlAQAAAAArUK21ta5hWarql5P8a5LdA5s3J2lJ9iTZr7W2e55DB89xUJKLkhzcWrt4TKUCAAAAwLo0znxtmoeKvyvJj83Z9qokn0/y54uFlgAAAADA5Jra4LK1dkmS/x3cVlU/SPKd1tr/zn8UAAAAADANpnaOSwAAAABg/ZraHpfzaa3df61rAAAAAABWTo9LAAAAAGDiCC4BAAAAgIkjuAQAAAAAJo7gEgAAAACYOIJLAAAAAGDiCC4BAAAAgIkjuAQAAAAAJo7gEgAAAACYOIJLAAAAAGDiCC4BAAAAgIkjuAQAAAAAJo7gEgAAAACYOIJLAAAAAGDiCC4BAAAAgIkjuAQAAAAAJo7gEgAAAACYOIJLAAAAAGDiCC4BAAAAgIkjuAQAAAAAJo7gEgAAAACYOIJLAAAAAGDiCC4BAAAAgIkjuAQAAAAAJo7gEgAAAACYOIJLAAAAAGDiCC4BAAAAgIkjuAQAAAAAJo7gEgAAAACYOIJLAAAAAGDiCC4BAAAAgIkjuAQAAAAAJo7gEgAAAACYOIJLAAAAAGDiCC4BAAAAgIkjuAQAAAAAJo7gEgAAAACYOIJLAAAAAGDiCC4BAAAAgIkjuAQAAAAAJo7gEgAAAACYOIJLAAAAAGDiCC4BAAAAgIkjuAQAAAAAJo7gEgAAAACYOIJLAAAAAGDiCC4BAAAAgIkjuAQAAAAAJo7gEgAAAACYOIJLAAAAAGDiCC4BAAAAgImzZa0LAAAAYHhbT9qxOckxSQ5Lcl6SnTPbt+1e26oAYPSqtbbWNayZqjooyUVJDm6tXbzW9QAAAOzN1pN2PDzJKUmOHNi8K8kJM9u3nbE2VbHWhNnAWhpnvmaoOAAAwBToQ8vTkxwxZ9cRSU7v97PB9D/3mSRnJjmtv5/xfADWAz0u9bgEAAAmXN+jbiZdSFnzNGnpel4erafdxjEQZifXfl7MftA/Tk9cYNz0uAQAANjYjkk3PHy+0DL99qP6dmwAfZh9Sv9w7vNi9vHJfTuAqSS4BAAAmHyHjbgd00+YDax7gksAAIDJd96I2zH9hNnAuie4BAAAmHw7081hudAiBS3J2X07NgZhNrDuCS4BAAAmXL/gzgn9w7nh5ezjEy3Ms6EIs4F1T3AJAAAwBfrVoY9Lcs6cXbti9egNR5gNbATV2kJ/nFn/xrlcOwAAwDj0q0Qfk27uwvOS7BROTb/l/ly3nrTj4elWFz9yYPPZ6UJLYTYwduPM1wSXgksAAICpM20B7t7qXSB83JXkhGHCx2n7XgDri+ByTASXAAAA02elQd9q21u9/b9P7+9rYP/sh3XTAAATTXA5JoJLAACA6dKHgFMT9A1R74VJbjxn32CbXUmO1oMSmFSCyzERXAIAAEyPfkj0TJIjsspB33KGYw9Z73zb5zp2Zvu2s5ZbB8A4jTNfs6o4AAAA0+KYdMOtFwr7Ksl
RfbuR6XtNziQ5M8lp/f1Mv31vhql3GIetsA6AqSS4BAAAYFocNuJ2ixoY6n3EnF1HJDl9kdBwVHWct8I6AKaS4BIAAIBpcd6I2+1VPyz7lP7h3N6Rs49P7tutpI6F5nBrSc5O8v4V1gEwlQSXAAAATIud6eawXCzo2zmi6610aPow9V4w8O+5+5LkxCQ/ucI6AKaS4BIAAICp0C9Cc0L/cMGgb4SL1axoaPqQ9T4xyXFJzpmzf1d+uEL6qg+RB5gEgksAAACmRh/kLRb0jcqwQ71vs9COYert22xNcmyS4/v7owe+llUdIg8wKaq1hXqsr3/jXK4dAACA8ennczwmXS/D85LsHGFPy8FrzKQbpr03Z6cLGhe8/krqHajjiMw/XLylC0L3WgPAOIwzXxNcCi4BAABYwNaTdjwnyR8P0fTYme3bzhpjHbOriifXDi9nP9SPurcpwFDGma9tGeXJAAAAYJ358pDtRjK/5EI9M2e2bztj60k7jku3uvhgD9Bd6eb1FFoC687UznFZVc+qqo9U1SVV9a2qenNV3X6t6wIAAGBdWbX5JftelTNJzkxyWn8/02/PEHNhAqwrUztUvKrenuSfk3wkXc/RFya5U5Ifaa39YMhzGCoOAADAglZrfklDwYFpZY7LIVTVTZN8K8n9WmvvHfIYwSUAAAB7Ne5Q0eI7wDQbZ742tUPF53Fwf3/hQg2qar+qOmj2luTA1SkNAACAadWHksclOWfOrl0ZTU/IY9LNWzlfaJl++1F9O4ANY10El1W1KcnJSd7fWvvfvTR9VroEePa2a/zVAQAAMO3GPL/ksAv7jGQBIIBpsV5WFX9Juvktf2qRdn+W5K8GHh8Y4SUAAABD6IdpnzWGU6/aAkAA02Tqg8uq+tskv5jkp1trew0hW2tXJLli4NgxVwcAAHBd/ZyGx6TrQXdekp3TPnfhevyaVtHOdJ1qFpvjcudqFgWw1qZ2qHh1/jbJw5I8oLX2tbWuCQAAYDH9Qi8zSc5Mclp/P9Nvn0rr8WtaTX3Ae0L/cO4KurOPTxQEAxvN1AaX6YaH/3q6uUUuqaqb97frr3FdAAAA8xpYnfqIObuOSHL6NAZ96/FrWgursAAQwNSp1ub+MWc6VNVChT+utXbqkOcY23LtAAAAg/qh1DNZfDjw0dPSs249fk1rzZB7YNqMM1+b2jkuW2smqAQAAKbJMUmO3Mv+SnJU3+6s1ShoBFb9a1rvwd4YFwACmDrTPFQcAABgmhw24naTYFW/JnNpAmwsU9vjEgAAYMqcN4p2E9bjcCRf0zAG5tKca3YuTfNAAqwzelwCAACsjp3p5ntcaL7+luTsvt28JrDH4Yq/pmH0Ye0p/cO504bNPj65bwfAOiG4BAAAWAV9r8gT+odzg77Zxycu1HtyElfvXunXtASzc2kutNbB4FyaAKwTgksAAIBV0g9lPi7JOXN27Uqy4FDnSe5xuNyvaYnW4/ygACxCcAkAALCK+iBva5Jjkxzf3x+9SMA30T0Ol/k1LcWqzaUJwOSwOA8AAMAq64dOn7WEQya+x+EyvqalmJ1L84jMH962fv+K5tIEYLLocQkAADD5NnSPw1WcSxOACaLHJQAAwOSb6h6H/dybx6TrEXpekp1LDRlntm87Y+tJO45LN9fnkQO7dqULLUc1LB2ACVGtzf1j1cZRVQcluSjJwa21i9e6HgAAgIUMrCqeXDu8nP1QN6qFcEaqr3u+sPGE5dQ7ihAUgNEZZ74muBRcAgAAU2KBEPDsTGiPw2kNWwEYnuByTASXAADAtJmWHod9nTNZfHj70ZNYPwDDGWe+Zo5LAACAKTLm1btH6Zhcu2foXJXkqL7dWatREADTxariAAAAjMNhI24HwAYjuAQAAGAczhtxOwA2GMElAAAA47Az3RyWCy2s0NItLLRz1SoCYKoILgEAABi5fi7OE/qHc8PL2ccnWpgHgIUILgEAABiLme3bzkhyXJJz5uzaleS4fj8AzKt
aW6jX/vo3zuXaAQAA6Gw9acfmdKuHH5ZuTsudeloCrA/jzNcEl4JLAACAVSHABFh/xpmvGSoOAADA2G09acfDk8wkOTPJaf39TL8dAK5Dj0s9LgEAAMaqDydP7x/WwK7ZD6QLzneplybAZNPjEgAAgKnUB4+n9A9rzu7Zxyf37eYeq5cmwAYmuAQAAGCcjklyZK4bWs6qJEf17a4x0EvziDntj0hyuvASYP0TXAIAADBOhy213Up6aQKwfgguAQAAGKfzhmx3m4F/L6uXJgDri+ASAACAcdqZZFd+uBDPQp4/MPx7yb00AVh/BJcAAACMTb8C+AlDNp8d/j1sL81h2wEwhQSXAAAAjNXM9m1nJHnuIs0Gh38v1kuzJTm7bwfAOiW4BAAAYDV8ech2h83ppTk3vJx9fGLfDoB1astaFwAAAMDk64dwH5NuXsnzkuxcYnB4/lLazWzfdsbWk3Ycl2518SMH9u9KF1qesYRrAzCFBJcAAABrZARh4KroF825ToC49aQdJ4wzQOzDy7dkCr5HAIxetbbYwm7rV1UdlOSiJAe31i5e63oAAICNY6EwMMlYw8Cl6us8vX9YA7tmP0weN1+984Syhyd53RCXPH5m+7bXL79iAFbTOPM1waXgEgAAWGXLDQPHUMdee3z2+2eSHDGnzlktXdh69Jzj5gtlv53kpkOUdezM9m1nLekLAWDNjDNfszgPAADAKurDwFP6h3PDwNnHJ/ftxlnHw9OFkmcmOa2/n+m3zzomXfg4X2iZXHsl8MHznp4u7Bx0SH9vpXAAhiK4BAAAWF1LDgNHbS/h4hFJTh8ILw8b8pSH9eddLJRtA/eDrBQOwHUILgEAAFbXksLAUVtij8/zhjztbLthQtkkuWDO9l1ZpeHxAEwPq4oDAACsrmHDwPPHdP3ZcHEhgz0+d6YLFfc2x+W3k7y/fzxs2HpiknNjpXAA9mJFwWVV3SfJsUluluSlrbUvVdX+Se6Q5Iutte+PoEYAAID1ZLEwcNart560YxwrjA8bLj50Zvu2s7aetOOEdMPKZ4d5D6p0nwe/2rcbNpQ91wI8ACxmWUPFq2rfqjoj3V/V/jTJ09L9RS5J9iR5Z5ITRlIhAADAOtL3LJz9vLTQQjXJdeebHJVhw8WnbT1px+zw7eOSnLOXtkekCzcPSRfKWoAHgBVb7hyXf5LkF5M8KcntM/BXt9ba5UnelOShK64OAABgHRoyDBzXCuM70w3vXsymJG/aetKOh/f13movx83W+ldJnt7/2wI8AKzIcoPLX0vyd621lye5cJ79n0v3Sw0AAIB59GHgYxZpNjvf5P1GeN3dSV47ZPOWHwanP5nkpntpO1vrBZk/lLUADwBLstzg8mZJPr2X/buT7L/McwMAAGwUhw7Z7k0jHjL+1iHbDS7UM/Rq6H04uTXdmgjH9/dHCy0BWIrlLs5zdroFeBbyk0m+vMxzAwAAbBTDzjd5o3TzXY6qx+KwCwTNml39exjnJdf07DxrOcUBQLL84PK0JM+oqn9J8sV+W0uSqvqtJI9MctLKywMAAFjXhg0QKz8ctv2WwTki+2HcxyQ5PN3ouG8lOTfJzoXmkpzZvm33wGrhwzhviFpbv9/COwCMxHKHiv9pkg8keW+SM9P9gnpxVX0jycuSvD3Ji0dSIQAAwDq1hBXGk2sP206S9MPHZ9J9Lntdus9hr+sfz+xteHnfc/OR6ab6Wsg1q4AvUquFdwAYuWUFl621K5M8OMnjknw1yeeT7JfkU0kem+QhrTW/rAAAABYxsML4d4c85LDkmtDy9CRHLtDuiHTDy/cWXp6e5FH9w0XDyL2shm7hHQBGrlpb7I96cw6oun66HpdnttbeNpaqVklVHZTkoiQHt9YuXut6AACAjWvrSTsekORdQzQ9Nt1w7JksPsR8dvj2Y9MtBHRe5hlC3oebp+TaIejZ6ULL64SRA8PTZ+e+XHBYOgDr2zjztSUHl31BP0hyQmvtFaMsZrU
JLgEAgEnRh4Ez2XsYuTtdD8kL0g0HX45vJ3ltupXFrwkchZEALMckBpfvTfKx1tqJoyxmtQkuAQCASTIw/DtZeAGcJDk5ydNHcMldSU4wxBuA5RpnvrbcxXlOTPKoqvrNqlruyuQAAAAMGFgwZ88CTaq/PX5El1x0HkwAWCvLDS5PTfeL9GVJLq6qL1XVp+bcPjmyKgEAADaOC5JsXqTNwSO61myvzpP7oeIAMDGW21vywiTfSfKFEdYCAABAv2r4KqokR6Wb3/KsVb42ACxoWcFla+3+I64DAACAznlrdN3VDkwBYK+WO1QcAACA8ThkROe5MD9czGcYaxWYAsC8lh1cVtXmqnpMVb2xqj7U395YVY+uKnOjAAAALFE/z+SLR3S6k/v7xcLLluTsJDtHdF0AGIllDRWvqoOTvCPJvZJckuSr/a4HJvmVJE+qqp8b9RLoAAAA60EfUB6Tbnj2eUl2zmzftrvfduQILvGtJC9M8pkkp+zlnLOh5on99QFgYiy3x+WfJrlHkqcmuWlr7e6ttbsnuVmSpyS5Z98GAACAAVtP2vHwJDNJzkxyWn8/028f1TyTvzOzfdvume3bzkiyNcmx6XpyfntOu11JjuvbAcBEqdaWMuVJf1DVOUlOb62dsMD+v05yXGvt8BXWN1ZVdVCSi5IcrHcoAAAwbn04eXr/sAZ2zX4we26SPx7ydG3OOWb9xcz2bX+wwPUX6ukJAMsyznxtWUPFk9wkyRf2sv/zSW68zHMDAACsO31oeErmDxsrXRD5W+l6QR6xQLvW739Guh6Ug0PAv5Wup+Xp8xyXJOlDyrOWUT4ArLrlDhX/cpJf2sv+X0rylWWeGwAAYD16dvY+f2UlOSrJy/vHc4fHDc5HeXp+OAT8+P7+8L2FlgAwbZY7VPzJSf42ydvTrVT3xX7X7ZM8LcmDkzyltfZ3oylzPAwVBwAARm2+4dhJHppuiPh8vSjnOj7JFbnuojpnpwstzUcJwMSYuKHirbWXVtXNkpyU5Ofm7L4qyR9PemgJAACwHHubJ3LrSTuOS/LSJDcdOGRXkusv4RLnzWzfdtbWk3a8ZaHrDFMLAEy7ZfW4vObgqkOS/GySW/abvp7kv1prF4ygtrHT4xIAAFiKfnGduT0hdyU5Icm9k/z+PIcttIjOfO12JTl6mPBxb7XolQnAahlnvrai4HLaCS4BAIBhLbIi+DDB5DBenOStWaTn5BCrkx8nvARgNUxccFlVP5vkAa21Zy+w/0+TvKu19u4V1jdWgksAAGCuBeaoTJKZLLza96gt2HOyr29vtSyp5yYArMQ487Xlrir+nHSr3S3kiCR/tMxzAwAArIm+J+NMkjOTnNbfn5fu882RWZ3QMuk+U53e1zPXMYvUMrs6+TFjqg0AVsVyg8sfS/Khvez/SJI7L/PcAAAAq25g+PWRc3bdNMnzRnSZYYe8zYaSJ/c9LAcdNuQ5hm0HABNpucHlfkn2XWT//ss8NwAAwKrqw8FTMt4elRclOWcJ7RfqOXnekMcP2w4AJtJyg8v/TfKw+XZUVSV5eJLPLrcoAACAVTY7/Hqc/iHJ1iTHJjk+yZ8MedzcnpM7081huVDvzZbk7Pxwbk4AmErLDS7/JslPVtWbqurHqmpLf7tzkjcluW/fBgAAYBqsxrDqX02Sme3bzprZvu31SYZdzPRaPSf7BXdO6B/ODS9nH59oYR4Apt2W5RzUWnttVd063SI9D0+yp9+1Kd0vyhe01l49mhIBAABGY74Vw/uAb6XDql+V5HGLtJkd9n1W/3i25+Riq4Nfp+fkzPZtZ2w9acdx6Ya3D/YU3ZUutLzOauQAMG2qtWHnhp7n4C68fFiSW/WbvpLkza21r4ygtrEb53LtAADAZOkX35kb9H07yWuT/FuSf063EM9y/G2SpwzR7vi+t+VgTaf3DwfDy9kPasftLYTcSxALAKtinPnaioLLaSe4BACA9WHrSTv2SRcc/lSS7yd5TZIzZ0O
8vQSEg76T5CbLLOGfkjx6iHbHzmzfdtbghgUC1bOj5yQAU2Dig8uqukOSR6T7K9/nk5y6WkFgVf1OkmcmuXmSTyZ5amvtw0MeK7gEAIApt/WkHS9K8oxcdw7/S5I8Nslbksxk4SHZs9oi+1didtj30fP1iNRzEoBpNRHBZVU9JcnTkvxEa+2Cge0PSbcgz74Dzb+a5D6D7cahqn413V82fzvJh5KcmC5AvX1r7VtDHC+4BACAKdQHffdL17Py8L00bUmem+SPV6OuvdSQLDLsGwCm0TjztaWsKv5LSb4yJ7TckuQVSXanm4j6x5KclOSWSf5whHUu5BlJ/qG19qrW2mfTBZiXJnn8KlwbAABYA/2iNBcmeVf2HlomXQ/KExZpM26Vrl4AYAmWElz+SJL/nrPt2HSTV7+4tfbq1tpnWmt/keSNSX5hRDXOq6r2TXKPJP81u621tqd/fN8Fjtmvqg6avSU5cJw1AgAAo7P1pB2bt56047R0I74OWsKhy523cpRunOT0fj5LAGAISwkub5JuguhBP5Nu2MO/ztn+/iS3WEFdwzgkyeYk58/Zfn66+S7n86x0XVdnb7vGVh0AADAyfeB3fpJfW+YpLswPh2yvhdm5M0/uh7kDAIvYsoS28wWCx6Qbmv3JOduv7G+T5s+S/NXA4wMjvAQAgIk0sGDNLyV5+gpPd3KS52e8C/AsppIcle5rOmuNagCAqbGUHpcfTfKYqjowSarqR5P8eJJ3tNauntP2Dhl/IHhBurk1D52z/dAk35zvgNbaFa21i2dv6VYZBAAAJkzfw3ImyZlZeWj53SQvTHJcknNWeK5ROGytCwCAabCU4PL56Rbd+VJVvSvdcPCWrhfjXA9L8oGVl7ew1tqVSf4n3XD1JElVbeoff3Cc1wYAAManDy1PT3LEiE752zPbt+3uV/TempUHoSt13hpfHwCmwtDBZWvt00kekC4sPDzdQj2/0Fr7n8F2VXX/dMPH3zSyKhf2V0l+q6oeU1V3TPJ3SQ5I8qpVuDYAADBi/fDwU/qHoxjS/eaZ7dveOPtgZvu23Un+Jt0IsYXmvGzp5sRfzkrguxc579lJdi7jvACw4VRrazk/9cpV1VOSPDPd/JufSPK01tqHhjz2oHT/ITm4HzoOAACsooF5LA9L1xNxU5J3jej0fzmzfdszF7jubK/O5NoB6ewHpOOSvKWv7VeSPGWI6/1Jkk8lmQ1K5z1v3/MTANaFceZrUx9croTgEgAA1k4fHp6S5MiBzRcmufEKT/2DJDeZ2b7timVc/+wkJw6Gi1tP2nH/dHNtLubYme3bzhr2vACwHowzX1vKHJcAAAAjMdDj8cg5u260gtO2/vboxULLJBmY8/LYJMf390fPEy7uzOJDy68ZAr6E8wIAe6HHpR6XAACwqvrh4ecnucmITz22Xo3DDC0XTAKwEY0zX9syypMBAAAM4dkZXWj5miRvT3Jukp394jt7Nc+8moseN7N92xlbT9pxXK47BHxXDAEHgLHQ41KPSwAAWDV9aPitrHwey1nHzmzfdtYSrj/f/JO7kpwwZ17LecPNpW5f0VcGAFPA4jxjIrgEAIDVtYSFbpLk20kOybWHZs9q6QLHo4cNCJewkvizk5yYa4er1wk355x30TAUANajiVycp6oOqqqTquodVfXxqvrxfvuNq+oZVXWb0ZUJAACsE4cN2e47SZ7c/3tub4vZxycuIbTcnC5cTK4bhM4+flm6uTf/ONftEXpEktP7kHLwvLNh6BHDtAcAhres4LKqjkzy8XS/0I9McuckN0iS1tqFSZ6Y5KkjqhEAAFg/zhuy3Skz27ednq4X5Dlz9u3K0hfDOSbdZ5f5em+m335IFp57c/a4k/sQdNgw9Jr2AMDSLHdxnhclOTDJXdPNT/OtOfvfnOQXl10VAAAwlYaY63FnuuDxiCwcIl6Q5IXJNYvivGWRcw5z3WF7eu5NJTmqv85Z+WEYOmz7VWPOTQDWg+UGlw9
K8uLW2merar6/SH413S9oAABgg1horsetJ+24Zq7HfiGbE9INr26Zf67JJw6GbP2/z1rJdTN8T89hHDbnftj2q2LI7wcATLzlznF5/XQTZS/kwGWeFwAAmEJLmeuxD89GMgR8Cded7ek5itVJz5tzP2z7sTPnJgDryXKDy88m+em97P/ldHNgAgAA69xy5nrsw8mtSY5Ncnx/f/QSQ8uhr9v32jyh37bQYj+LaUnOTheCJouHoXPbj5U5NwFYb5Y7VPzkJK+uqk8leVO/bVO/kvhzk9w3ya+svDwAAGAKDDvX41O3nrTj/Fx7zsWzVuG6xyQ5q58v87jMM4w6yf7pVhJfaN7NWdesZD7ksPehVz4fgYmdcxMAlmNZPS5ba69N8n+TvCDJF/vNb0/yhSSPSvLs1tqbR1EgAAAw8Yadw/HFSU5LcmaSmREMW17yHJML9fRM8oS+yUK9Jy/IPMPYRznsfQQmcs5NAFiu5fa4TGvtT6vqNel6Vt4mXQj6lSRntNa+OqL6AACAybecORxn51xcSbi3rDkmF+jpuVBvzO/02164UM/JYVc+XwUTN+cmAKxEtTaKuamnU1UdlOSiJAe31i5e63oAAGAa9XMmzqQLIxcbaj2opeuZePRyQr4hrrvk8/fnXOsAclnG8f0AgMWMM19bVnBZVXdPcp/W2ksX2P/kJB9orX1iZeWNl+ASAABGo++t+KZFG87v2Jnt285a5nVnV9FO5p9jcrWHa68p3w8AVts487Xlrir+p0l+di/7H5Bu/ksAAGBjuGAFxy57zsUJm2Nyzfl+ALCeLDe4vEeSnXvZvzPJPZd5bgAAYPqsZMGXFc25OLDgzs8k+ZP+9tgkb1nJeafVQgsQCS0BmDbLXZznwCRX72X/niQHL/PcAADA9FlO+Dg75+LeOkUM66G59sI6z0mya+tJO07YiIHdAgsQAcBUWW5w+aUkD0ryNwvsf3ASK4sDAMAEGtMCNDvThZDDLtAzO+fiiSu99px5HQeNYuVyAGCNLHeo+D8m2VZVf1VVN5zdWFU3rKoXpwsu/3EE9QEAACPUh3wzSc5Mclp/P9NvX7Y+fDyhfzh3BdD5VgQdyZyLfQh7Sv9wbmA6+/jkvh0AMEWWG1z+dZJXJzkxyQVV9Y2q+ka6CblPSPLaJC8eSYUAAMBIDPRMPGLOrtmeiSsNL/e2MMwjMp45F49JNzx8oV6eleSovh0AMEWqtfn++DnkwVXHJvmVJLfqN30lyb+01s5aeWnjN87l2gEAYJL0PQ5nsvBQ7tn5Jo8ewdDtcQxFX+hav5au5+hijp/Zvu3146gBADayceZry53jMknSWjsz3dASAABgss32TFzIYM/Es1ZyoVVeGGbYRYFWtHI5ALD6VhRcAgAAU+OwYdutZo/JEVhsUaBRrlwOAKyiZc1xWZ0nVtWHq+qCqto9z+3qURcLAAAs27A9Dm+TMSzeMy5DLgq04pXLAYDVt6w5LqvqRUmekeQT6f5y+d352rXWnr+S4sbNHJcAAGwUQ85x+Z0kN+kf15x9yQhWAR+XPlg9JdceDn92utByImsGgPVgnPnacoPLbyU5q7X2yFEWs9oElwAAbCQDq4on8weTFya5cca8eM+4TNkQdwBYFyYxuLwkye+21l4+ymJWm+ASAICNZi89E/8hyR8PcYpjZ7ZvO2sMpQEAU2ic+dqy5rhM8q4k9xplIQAAwPj1w6a3Jjk2yfH9/dFJvjzkKYZd5AcAYEWWu6r4k5O8o6qeneRlrbXvjLAmAABgjPrh02cNbtt60o5hF+8Zth0AwIost8flF5LcKsmfJPlWVf2gqi6ec7todGUCAABjtjPdHJYLzSXV0g0p37lqFQEAG9pye1z+Sxb+Dw0AADBlZrZv2731pB0npFu8p2X+xXtOtNgNALBalrU4z3phcR4AALi2vSzec2I/PyYAwDUmblXx9UJwCQAA17X1pB2bkxyTbiGe85Ls1NMSAJjPRAaXVXWLJM9
OtwrhzZI8tLX23qo6JMn/TfKq1trHR1bpGAguAQBYzwSQAMC4jTNfW9biPFX1I0k+nuRXk3wtyUHp58tsrV2Q5KeSPGVENQIAAEvUD/meSXJmktP6+5l+OwDAxFvuquJ/keR7SW6X5Ndz7Ym7k2RHur/sAgAAq6wPJ09PcsScXUckOV14CQBMg+UGlz+d5O9aa9/O/KuLfyPX/U8SAAAwZv3w8FP6h3M7GMw+PrlvBwAwsZYbXG5Kcule9t80yRXLPDcAALB8x6RbEXxuaDmrkhwVI6QAgAm3ZZnHfSzJtiQvnbujqrYkeVSS/15BXQAAMFUmaCGcw0bcDgBgTSy3x+WfJXlwVf1dkjv12w6tqp9N8s4kd0yyfQT1AQDAxJuwhXDOG3E7AIA1sazgsrX2H0kem25V8Xf3m1+bLrS8e5JHt9beO4oCAQBgkk3gQjg7k+zK/HPRp99+dt8OAGBiVWsL/X9miIOrDkjyoCS3SReCfiXJO1prl4ymvPGqqoOSXJTk4NbaxWtdDwAA06UfHj6TLqScb07Jli5EPHo1h40PhKmZU9fsf/6Pm9m+7YzVqgcAWL/Gma8td6h4kqS19oPW2r+21l7UWvvz1trp0xJaAgDACEzkQjh9KHlcknPm7NoVoSUAMCWWtThPVd1imHattW8s5/wAADAlJnYhnJnt287YetKOt2QyFgwCAFiy5a4qPpOF58wZtHmZ5wcAgGkw0Qvh9CHlWWtxbQCAlVpucPn4XDe43Jxka5JHJ/lWkpcsvywAAJgKswvhLDbHpYVwAACWaEWL88x7wm7Bng8leXlr7a9HevIRszgPAAArZSEcAGAjm9jFeebTWvtBklclefqozw0AAJPGQjgAAOMx8uBy4Lw3H9O5AQBgovTh5NYkxyY5vr8/WmgJALB8Ix0q3ncN/ekkr0zy5dbaT4zs5GNgqDgAAMBk23rSjs1JjklyWLqFrnb2C08BMAEmbqh4Ve2pqt1zb0m+m+StSS5N8uRRFgoAAMDG0s8hO5PkzCSn9fcz/XYA1rll9bisqufluquKt3TB5VeSvLO1dvWKqxszPS4BAAAmk4WvAKbDOPO1ka8qPk0ElwAAAJOnHx4+k+SIXDu0nNXSLYB1tGHjAGtr4oaKAwAAwBgdk+TIzB9apt9+VN8OgHVqyzCNquqVyzh3a639n2UcBwAAwMZ22IjbATCFhgoukzwg153TcjEbdww6AAAAK3HeiNsBMIWGCi5ba1vHXAcAAADM2pluDsvF5rjcuZpFAbC6hlqcp6o+luTZrbW3948fneS9rbWZ8ZY3XhbnAQCA6dYv4nJMuiHD5yXZabGW9cGq4gDTYRIW57lzkkMGHr8qyU+MshAAAICl6IOtmSRnJjmtv5/ptzPl+lDyuCTnzNm1K0JLgA1h2ODy60l+tqo2948r5rAEAADWyEBvvCPm7DoiyenCy/WhDye3Jjk2yfH9/dFCS4CNYdih4s9M8udJdie5LMkBSa5IcvVeDmuttYNHUeS4GCoOAADTpx8ePpPF5z882rBxABivceZrwy7O86Kq+mS6v24dmuQxST6S5KujLAYAAGAIxyQ5ci/7K8lRfbuzVqMgAGD0hgouk6S19s4k70ySqnpskpe11k4bU10AAAALOWzE7QCACTR0cDmotTbs3JgAAACjdt6I2wEAE2hZweWsqjowyS2T3CjzzC3TWnvvSs4PAAAwj53p5rBcbI7LnatZFAAwWkMtznOdg6oOSfI3SX4lyeb5mqRbnGe+fRPD4jwAADCdBlYVT64dXs5+wDnOytMAMH7jzNeWG1yekeQhSf463V8xvztfu9bae1ZU3ZgJLgEAYHr14eUpufZCPWcnOVFoCQCrYxKDy+8neWlr7fdHWcxqE1wCAMB023rSjs3pVg8/LN2cljtntm/bvbZVAcDGMc58bblzXF6aZGaEdQAAACxZH1KetdZ1AACjt9zVwV+b5GGjLAQAAAAAYNZye1yenuR+VfX
2JC9PN4/MdYZjtNY+toLaAAAAAIANarnB5fsG/v3AefZXutX8JnpVcQAAAABgMi03uHzcSKtYoqramuQ5SR6Q5OZJzk03fP1PW2tXrmFpAAAAAMAILCu4bK29etSFLNEd0s3P+cQkX05ypyT/kOSAJL+3hnUBAAAAACNQrbW1rmEkquqZSZ7UWrvVXtrsl2S/gU0HJtmVMSzXDgAAAADrXVUdlOSijCFfG7rHZVU9Y4nnbq21Fy/xmJU4OMmFi7R5VpLnrkItAAAAAMAKDN3jsqr2LPHcrbW2KovzVNVtkvxPkt9rrf3DXtrpcQkAAAAAIzIRPS6THD3KC8+nqrYn+YNFmt2xtfb5gWOOSPL2JG/aW2iZJK21K5JcMXDsCqoFAAAAAMZloua4rKqbJrnJIs2+OrtyeFUdnuSsJP+d5LGttSX1Ch1nIgwAAAAA692k9Lgcu9bat5N8e5i2fU/LM9MNEX/cUkNLAAAAAGByTVRwOaw+tDwrydeT/F6Sm84O+26tfXPtKgMAAAAARmEqg8skD0xym/62a84+E1cCAAAAwJTbtNYFLEdr7dTWWs13W+vaAAAAAICVm8rgEgAAAABY35YUXFbV4f1K3ou1OWxlZQEAAAAAG9nQwWVV3SPJN5I8apGmj0ryjar6sZUUBgAAAABsXEvpcfk7Sb6Y5MWLtHtxki8kedpyiwIAAAAANralBJfHJnlja63trVG//01JfmYlhQEAAAAAG9dSgsvDkswM2fYbSfY6FyYAAAAAwEKWElz+IMmNh2x7oySXLr0cAAAAAIClBZefSvKQIdv+Yt8eAAAAAGDJlhJc/lOS+1XVU/fWqKqekuR+SV69ksIAAAAAgI2rFllr54cNqzYl2ZHkQUnemeS1ST6d5JIkByb5sSS/3u//zyQ/v9hCPmutqg5KclGSg1trF691PQAAAAAwTcaZrw0dXPaFXC/JXyZ5QpLNc3cn2Z3kH5L8bmvtslEVOS6CSwAAAABYvokJLgcKOiLJzye5Y5KDklyc5PNJ/qO1tmuUBY6T4BIAAAAAlm+c+dqW5RzUWjsnySsW2l9VhyR5VGvtb5dbGAAAAACwcS1lcZ69qqr9q+r4qtqR5Jwkp4zq3AAAwIhUbUrVTVN1WKr2WetyAAAWsqLgsqo2VdXPV9Vrk5yf5DVJbpPkr5McO4L6AACAUai6Uar+IMlXknwryblJvpWqF6fqVmtbHADAdS13jsv7JPn/kjwyySFJvp7klkme0Fr7x5FWOEbmuAQAYEOoOjrJfyU5Yk/qDf92x2O+fP4NbnzQMTOfOOL23555YCXXS/KwtPZfa1wpADBlJmKOy6q6fbqw8vgkt0r3l9p/SPL6JFck+WKS746yOAAAYIWqrp/k7UnaEx72hye+83b3/cMkj57dfcAVl57zn//45LMPv+SCN6fqHmntC2tWKwDAgKUMFf9skv+T5M1J7t1au21r7Y9aa59JsvRumwAAwGr41SS3e+bPn/DX77zdfV+a5IjBnT/Yb//Df+Y3//6ul23Z94okT1+TCgEA5rGU4PKqJDdKNyT8qKrabzwlAQAAI/SEPcnb33TnBz6zf1xz9tdl+14vr7rnQze35NdTdcBqFwgAMJ+lBJeHJnlakpsmeVOSb1XVP1XVg5NYjRAAACbTj3z88Dt8PcmRuW5oOavOvNU9Dq7kgCRHrV5pAAALGzq4bK1d1Fp7RWvt/km2Jnlhkrsk+fckH043XPwOVbXvGOoEAACWZ88l++1/8GKNNv1w0U7TQAEAE2EpPS6v0Vo7u7X25621uyS5a5K/T3JOkhckuaCq/qWqHjO6MgEAgGX6xF3O+9IdF2t07Fc/mqs3bf5+kq+vQk0AAItaVnA5qLX2qdba77fWbpHkAUnemOTYJK9c6bkBAIAV+/sbXX7JXe559me+lQV6Ux582SXtUZ98x55qe16R1i5f5foAAOY1dHBZVedW1cMGHu9bVY+uqkNnt7XWzmqt/WaSmyc
5brSlAgAAy/CvSf7ntW94zr4/ev5Xkjnh5SE/+G571enPq/2vuvwHm1t78ZpUCAAwj2ptuClsqmpPkl9vrZ3WP75Jkm8leWBr7d3jK3F8quqgJBclObi1dvFa1wMAAGPRdTb4jyR3+8At7nz5v9/+J6931eYtudeuz+Yhn3tPkrp4v91X/Uxa++gaVwoATJlx5mtbVnj8QqsSAgAAk6K181P1E0l+9b7f+NST7/uNT92zkk2Xbdnv3C179vzN5m6I+AVrXSYAwKCVBpcAAMA06OaufHUlr05VJdl0/asu373WZQEALERwCQAAG003X5TQEgCYaEsNLh9dVffp/329dBN7P6Wqfnmetq21dsJKigMAAAAANqalLs6zFK21tnnpJa0ei/MAAAAAwPJNxOI8rbVNo7wwAAAAAMBChJEAAAAAwMQZSXBZVQdV1Sur6g6jOB8AAAAAsLGNqsfl9ZM8JsnhIzofAAAAALCBjXKoeI3wXAAAAADABja2OS6r6o7jOjcAAAAAsL4NHVxW1Sv2svvKJO9J8t2+7b2SvHdlpQEAAAAAG9WWJbR9fFVVa+3/zN3RWvtukmOTpKqOTfKWJJeNpkQAAAAAYKNZylDxP0ryuKp65UINquqhSXak63n50yusDQAAAADYoIbucdlae2FV7UnywqralORxrbU2u7+qHpPkFUm+nOSBrbVdI68WAAAAANgQljJUPK217VW1O8mfJ9lUVY9prbWqOiHJXyX5eJIHt9YuGEOtAAAAAMAGsaTgMklaay/qw8u/TFJVNZPkD9MtzvNLrbVLRlsiAAAAALDRLDm4TJLW2l9V1dVJTk7Skrw1ySNba1eOsDYAAAAAYIMaOrisqr+eZ/PXk9wsyblJ/rKqBve11toJKysPAAAAANiIamB9nb037BbmWYrWWtu89JJWT1UdlOSiJAe31i5e63oAAAAAYJqMM19byqrim0Z5YQAAAACAhQgjAQAAAICJM3RwWVU/XlU3HrLt0VX16OWXBQAAAABsZEvpcfnBJA+efVBVN66qS6vqfvO0/Ykkr1ppcQAAAADAxrSU4LLmeXy9JBO9AA8AAAAAMH3McQkAAAAATBzBJQAAAAAwcQSXAAAAAMDE2bLE9lur6u79vw/u729bVd+b0+7oFVUFAAAAAGxo1VobrmHVniRzG9c8267Z3lqb6IV7quqgJBclObi1dvFa1wMAAAAA02Sc+dpSelw+bpQXBgAAAABYyNDBZWvt1eMsBAAAAABglsV5AAAAAICJI7gEAAAAACaO4BIAAAAAmDiCSwAAAABg4gguAQAAAICJI7gEAAAAACaO4BIAAAAAmDiCSwAAAABg4gguAQAAAICJI7gEAAAAACaO4BIAAAAAmDiCSwAAAABg4gguAQAAAICJI7gEAAAAACaO4BIAAAAAmDiCSwAAAABg4kx9cFlV+1XVJ6qqVdVd17oeAAAAAGDlpj64TPIXSc5d6yIAAAAAgNGZ6uCyqn4+yYOS/N5a1wIAAAAAjM6WtS5guarq0CT/kOSXk1w65DH7JdlvYNOBo68MAAAAAFipqexxWVWV5NQkf99a++gSDn1WkosGbrtGXx0AAAAAsFITFVxW1fZ+kZ293e6Q5Knpekv+2RIv8WdJDh64HTnarwAAAAAAGIVqra11Ddeoqpsmuckizb6a5I1JHpJksPjNSXYneV1r7TFDXu+gdD0vD26tXbz0igEAAABg4xpnvjZRweWwquoWSQ4a2HR4knckOS7Jh1prQw0BF1wCAAAAwPKNM1+bysV5WmvfGHxcVd/v//mVYUNLAAAAAGByTdQclwAAAAAAyZT2uJyrtTaTpNa6DgAAAABgNPS4BAAAAAAmjuASAAAAAJg4gksAAAAAYOIILgEAAACAiSO4BAAAAAAmjuASAAAAAJg4gksAAAAAYOIILgEAAACAiSO4BAAAAAAmjuASAAAAAJg4gksAAAAAYOIILgFgLVTtm6pHpeo/UvWZVH0
sVSen6vZrXRoAAMAkEFwCwGqr+tEkn0/y+pYc8KWbHPW5z9106/eu2rT5sUk+3weYm9e2SAAAgLW1Za0LAIANpeqWSd6d5PyTHvzUE//5Lj/3e0mOSZL9rr4yj//IW773++999VMraUmevpalAgAArKVqra11DWumqg5KclGSg1trF691PQBsAFWvSPILv/wbf/nsTxx+h1fObh1o0X7rw2fkD898ZSW5XVr70uoXCQAAMJxx5muGigPAaqm6YZLjd9eml37i8Dv8yezWua3+6e6/mIv2O2DP7qonrW6BAAAAk0NwCQCr5x5Jrv/cB/72TJIjc93QMklyxZZ96x23u++mH+x7/W2rWRwAAMAkEVwCwOrZL0m+fOMjD1qs4aX7XC97atP1xl8SAADAZBJcAsDq+UaSPPLT/3nDxRre6fyv5MrN+5w79ooAAAAmlOASAFZLa/+b5KO//JkzfzrJrnQrh1/Hj57/lXbPcz6XG152yf9b1foAAAAmiOASAFbXX25Kfu6f3vCcd/ePrxVe3vT7320nv+0v6/v7Xv+8ffdc/ZY1qA8AAGAiVGvzdvbYEMa5XDsALKjqBUn+8Lwb3OSTL3jA/zny40fc4SYHXHFZfvHz78tvfHzHnv2vuvyi61195X3T2hfWulQAAIC9GWe+JrgUXAIwalUHJjk+yb2SbEnypSSnprVzBto8IsnvJrn37Kbdtenyau3Vm9JekNZ2rW7RAAAASye4HBPBJQAjVVXpwsj/25IDLtnvgC9dvmWfusmlF99iU9uzTyWnJnlKWrt84Jg7JjkqyRVJPh6/jwAAgCkiuBwTwSUAI1X1/CT/99OH3nrHkx727LvvOvjQw5LkBldcml//+I7vPfO9r9l/c9vz7iS/lNauWttiAQAAVk5wOSaCSwBGpurOST75gVvc+bTjf+2Fvza7daBF+8mZj+e1b3hOq+RJae3la1AlAADASI0zX7OqOACMxpNacu6jH/nHP90/rjn76/1b75b3HH33K1ryO/2wcgAAABYguASA0filLxxyy/devXnLkbluaDmrXn/XB1+/kjunm9cSAACABQguAWA0Djrn4JvtXqzRtw+40TXtx1sOAADAdBNcAsBoXLD1u+cesFijW3zvm7P//M54ywEAAJhugksAGI033OrCc47d/8rLzkmy0Mp37Tc+tuOKlrwnrZ23msUBAABMG8ElAIzGyyo54IzX/N5nq+1Jrhtetkd+6p25x7mf36+Sv12D+gAAAKZKtbZQp5D1b5zLtQOsmarDkzw+3QIwSfKZJP+Y1natXVEbRNXjkrzynANv+rE/+PmnHfW+rXe9aapy6++cnSd8+Izv/+qn/vMGSV6W5EnZyL+AAQCAdWOc+ZrgUnAJrBdVW5L8VZInteTKC69/0Oev2LLvPod+/8LbbGp79q3kFUmeltauXONK17eqX06yPcntr65NP9i9afOe/XZfdWBLvl3JXyZ5kdASAABYLwSXYyK4BNaNqkryT0ketXPrXV//Ow896QEXX+8GRyTJAVdcmkd/bMf3nrnzn26wqbW3JHlkWjeWmTHpfh7HJrlnki1JvpTkrWntijWtCwAAYMQEl2MiuATWjapfSLLjTXf6mRc/c9vTT5zdOtCiPeiLH8zL//VPK8kj0trpq14jAAAA68448zWL8wCsD09uycee+QsnPqJ/XHP21ztvd9984rDbXdGSJ612cQAAALBUgkuAaVe1KcmDP3zkj34wVUfmuqHlNS3fcOcH7VfJA1J1/VWsEAAAAJZMcAkw/a6XZPPMjQ9ftOGF+x80+88DxlkQAAAArJTgEmD6XZbk+z9y/ldvsFjDoy88N3tSV6WbfwQAAAAmluASYNp1q6y9/k7nf+VnN+++eleSeVddq7an/X+f+Perk7wprV21qjUCAADAEgkuAdaHl1Ry+L++9vc+ntaSueFla+0ZO19bR130rS2b0v5mTSoEAACAJRBcAqwHrX0yydPu/M0vP+T
9f//4j/30Vz92QVpLWsu9zv7fvPL051/+1A++MUl+P6399xpXCwAAAIuqboThxlRVB6Wb5+3g1trFa10PwIpVPSLJ85PccU/qqlapza1tacmXKnl+WnvdWpcIAADA+jHOfE1wKbgE1puqSvLTSX4sSSX5TJKz0tqeNa0LAACAdWec+dqWUZ4MgAnQ/UXqPf0NAAAAppI5LgEAAACAiSO4BAAAAAAmjuASAAAAAJg4gksAAAAAYOIILgEAAACAiSO4BAAAAAAmjuASAAAAAJg4gksYlapa6xIAAAAA1gvBJaxE1W1S9f9S9c0ku1N1SarelKpjBZkAAAAAy7dlrQuAqVX16CT/mOTi3VX/9P5b3nV3kiPvcc7n7nPAVZe/O8krU/WEtLZ7bQsFAAAAmD6CS1iOqp9P8qokp97zKa955wUH3OgvkxyZJGktx3/i7Re+4J0veeym5KIkz1jDSgEAAACmUrXW1rqGNVNVB6ULlg5urV281vUwJboh4B9NctFtf+9fX3LV5n3eNLtnoFV78gffmN9772v2bEq7RVo7d/ULBQAAABivceZr5riEpbtnkrtfsXmfv7pq8z4n99vmzmdZr7nbtlyxZZ9Nu2vTb65ueQAAAADTT3AJS3e3JO1uTzvt0nTDw+ddhOeS6x1QHz7qTvXtA270M6taHQAAAMA6ILiEpduUZM/lW/Y9dLGGu2tTLt+yz/6rUBMAAADAuiK4hKX7fJLNzz7zlYfsrdF+V12Ru537hVy2z/W+tEp1AQAAAKwbgktYuvcm+eLjP/rW+yfZlWTeFa4e/pl3txtdfkkOv/iC561ibQAAAADrguASlqq1PUlesCnt4f982knvS2vJnPDyvl//VHvOu19RX73RETsPvvySL65JnQAAAABTrFqbt7PYhjDO5drZAKr+MMkLLt7vgLP/9r6/euDnbrb1hje67JI87DPvzv2++rGcf+CNP3HYJd/5ybR26VqXuqqq9kvy8CS/leR2SXYn+WiSv0vy7j74BQAAANaBceZrgkvBJStRdf8kT23JQyvZnCQX77f/F69/1RV/sc+e3f+U1q5a0/pWW9XRSf4jye1bcub/Hnrrb1y2z/Vu+CPf+uqdb3DlZUcn+c8kx8XrDQAAANYFweWYCC4ZmaobJLlJkh+ktQvWupw1UXWTJB9JsufZD3ryS0672y88I8mRSZLW8qAv/fcFL3nL9v332bP7g0l+Lq3tXsNqAQAAgBEYZ75mjksYhda+n9a+vmFDy85Tktzs//zKc1502t1+4f8lOeKaPVV55+3ue5PHPuJ510/yM0l+YY1qBAAAAKaE4BJYuaotSZ6wJ/W6d93m3n80u3Vuq/dvvVs+c7NbXbknefIqVwgAAABMGcElMApHJTn8DXd+0JfTDQ+fG1rOqh13+Kl999SmY1avNAAAAGAaCS6BUdgnSc45+KYHLtbwii37JsmWcRcEAAAATDfBJTAK5yW54tivfPTQxRre9dwv5PJ99jt3FWoCAAAAppjgEli51i5J8oa7n/v5B23ZffWuJG2+ZodeckH7+S9+INe76oq/Xt0CAQAAgGkjuARG5cWVHPGOV/7OV7fsvjqZE14efNkl7WX/+sLaXZsu2tL2nLomFQIAAABTo1qbt2PUVKiqbUn+b5I7J7k8yXtaa7+8hOMPSnJRkoNbaxePpUjYSKp+NclrL9l3//P/5icedcD7tt71hlv2XJ2f+fKH8+sf/489B175gx/su/vqY9Pa/6x1qQAAAMDKjTNfm9rgsqp+Jck/JHl2knenW+zjTq21Ny7hHIJLGLWqeyV5ZkseVv0iPLtr02XV2qmb0l6U1r62xhUCAAAAIyK4nKOqtiSZSfLc1to/ruA8Gyu4rLplkicmeWiSg5J8J8kbk/xjWjt/LUtjHaq6aZKtSXYn+WJa+/7aFgQAAACM2jjztWmd4/LuSY5IsqeqPl5V51XVf1TVnfZ2UFXtV1UHzd6SHLgq1U6Cqqcl+Wp
Lfmfmhod97b+PutNHdh10s2+15DlJZvohvjA6rX07rX0krX1MaAkAAAAs1Za1LmCZbtXfPy/JM9L1vvzdJGdV1e1aaxcucNyzkjx37NVNmqrfTHLKJ29+27f+2q+98B6X7nv9bbO7bnTpReee/rrf//KtLzzntFRdnNb+Yw0rBQAAAIAkEzZUvKq2J/mDRZrdMV2Py9cleWJr7eX9sfsl2ZXkj1prL1vg/Psl2W9g04H9Met3qHjV9ZLs+tJNjvrUA//PS++fqiSpgRZt057def/fPf5Th33/O/sm+dFM0pMCAAAAgIm1kYaK/790weTebl9Ncl7f/rOzB7bWruj33WKhk7fWrmitXTx7S3LJOL6ICfPIJDf57V9+9h3nCS2TpPZs2pw/+PmnHZbu+3u/1S4QAAAAAOaaqKHirbVvJ/n2Yu2q6n+SXJHk9kne12/bJ91CIF8fY4nT6L7f3/f6X/3KIUfdai9t6r1H3/1mV23afMk+e3bfN8lZq1QbAAAAAMxrooLLYbXWLq6qv0/y/Ko6O11Y+cx+95vWrrKJtM/lW/ZdfOh3Va7etHn3Pnt277sKNQEAAADAXk1lcNl7ZpKrk7wmyfWTfCjJA1pr313TqibP12502SXHH3DFpfnBfvsv2OjIi87P9a6+8uAkX1u90gAAAABgfpM2x+XQWmtXtdZ+r7V2aGvtoNbaA1trn1nruibQqze1Pfv+2iff8b0kC/W8bI//yFsuSXJxkn9ZtcoAAAAAYAHT3OOSYbS2q6pOO+msVz3i44ffPv9z5I+09Av0HPW9b+bu53y+3e3cz+fRH/u3Ayr5s7T2gzWuGAAAAABSrS0+/eF6Nc7l2idK1QFJduxJ/eS/3/4nr3z/Le+8/y9/9j25167PzO1y++UkL0ryD9nITwwAAAAAhjLOfE1wuRGCyySp2i/JU1vy9EoOn938/X2uN3PW0Xd/9/kHHfKJx330bT+5Ke1Xk5yc5BnCSwAAAAD2RnA5JhsquEyS7uv9WpIvv/Tex/37W37k/r/9hZve8uapmm2x6+X/8if/8aAvf+i3kjwqrb1hzWoFAAAAYOIJLsdkAwaXv5PklF9/5J/89vuOvtvLZ7cOtGhJ8sGXPOZTh33/O5eltfuueo0AAAAATI1x5mtTu6o4y/Ibe5K3ve/ouz23f1xz9leSvPDYxx+R5D6pus2qVgcAAAAAPcHlxnLY5252q+8nOTLXDS1n1WdvdqtDZtuvTlkAAAAAcG2Cy43l0j1Vhy7W6OArvn9N+/GWAwAAAADzE1xuLO+8/be/fq99r75qr40e+tmzcuXmLd9N8unVKQsAAAAArk1wubH83b57rr7hEz90+kXpF+KZ61bf2dWO+/S72uY9u/8urV25yvUBAAAAQBLB5cbS2ueT/Pnvvu91Bz9952vroMu/f014WW1Pjv3yh9s/v/5ZtXvTpl2bW3vRGlYKAAAAwAZXrc3b8W5DGOdy7ROrqpI8b0/qD6/Yss+m9229a12xed/82Plfzi2/9818+4AbfvamP/jeA9La+WtdKgAAAACTbZz5muByowWXs6puvrs2/daF1z9o21Wbtxxw5eZ9vn7o9y984fWvvuKD2chPCgAAAACGJrgckw0dXAIAAADACo0zXzPHJQAAAAAwcQSXAAAAAMDEEVwCAAAAABNHcAkAAAAATBzBJQAAAAAwcQSXAAAAAMDEEVwCAAAAABNHcAkAAAAATBzBJQAAAAAwcQSXAAAAAMDEEVwCAAAAABNHcAkAAAAATBzBJQAAAAAwcQSXAAAAAMDEEVwCAAAAABNHcAkAAAAATBzBJQAAAAAwcQSXAAAAAMDEEVwCAAAAABNHcAkAAAAATBzBJQAAAAAwcQSXAAAAAMDEEVwCAAAAABNHcAkAAAAATBzBJQAAAAAwcQSXAAAAAMDEEVwCAAAAABNHcAkAAAAATBzBJQAAAAAwcQSXAAAAAMDEEVwCAAAAABNHcAkAAAAATBzBJQA
AAAAwcQSXAAAAAMDEEVwCAAAAABNHcAkAAAAATBzBJQAAAAAwcQSXAAAAAMDEEVwCAAAAABNHcAkAAAAATBzBJQAAAAAwcQSXAAAAAMDEEVwCAAAAABNHcAkAAAAATJwta10AY1Z15yS3SXJ1kv9Ja+escUUAAAAAsCg9Lterqoen6sNJPpnkX5K8JcnXU3VGqn5sbYsDAAAAgL3T43I9qvqDJNuT/NeVm7c87NGP+OO2p+rwEz7wz7f7ia9/8hcq+UCqtqW19651qQAAAAAwn2qtrXUNa6aqDkpyUZKDW2sXr3U9I1H1wCTvTPInt3rmWz6xZ9PmU5IcObt7/ysvO+fMf3jihYd+/8Ijk9w6rX13rUoFAAAAYLqNM18zVHz9eXqSj/ah5elJjhjceem+1z9822NPudOe1IFJHrMmFQIAAADAIgSX60nVoUkefNWmzX/f97RMkprb6oIDbpT/vO29r2zJY1e3QAAAAAAYjuByfTk8Sb30Po/ckm54+NzQclZ98rDb7X/1ps1Hr15pAAAAADA8i/OsL5cnyZ6qo25+8QW58ze/lC17dufrNzosn7nZrZL6YY55gysvzVWbtly9z5qVCgAAAAALszjPelqcp2pLknMu37Lvnn12X33zzW3PNbv+99Bb5+/ufVx23PGYVNuT97zst7Kptf888qLzH7R2BQMAAAAwzcaZr+lxub78ZJIb7nf1lfu85D6PuPjV9/ilA6/Ysk/d/ZzP5bH/8295yVv/PFu/e26+dYMbt1tcdH5dtmW/5611wQAAAAAwHz0u10uPy6obJflqkk9cvO/1b3P5Ptc7/Om/+Lub3n/Lu3RDxFvLM3a+Nk/74BtydW3K1258xJm3/c7ZP5ON/AQAAAAAYEX0uGQYj01ywCN/bftrvnbjI17x8jNeUK97wx/lSzc5Kh87/A7ZZ8/Vuf9XPpokuXLzPt+57XfO/jmhJQAAAACTSo/L9dPj8uN7Ul+81R+87SeSHJHW6j5nfzqP+uQ7svW75+WqTVvyycNum0v22/97T3//62+Y5PC0dt4aVw0AAADAFNPjkmEc8cnDbvvRJEcmSary37e4c/77Fne+VqM7ffPLN3z6+1+fJIcnEVwCAAAAMJE2rXUBjMzluzdtPnSxRgdecensPy8bbzkAAAAAsHyCy/Xj3Xc6/yv32rRn914bPeRz78kVm/e5IMkXV6csAAAAAFi6qQ0uq+p2VfWWqrqgqi6uqvdV1bFrXdcaeun1rr7y5r/+8X//bpJ5Jy699XfObg/7zJlty57df5vWrl7l+gAAAABgaFMbXCb5t3RzdD4gyT2SfDLJv1XVzde0qrXS2oeTvOJ5//Wygx//kTdnv6uuaAP7cszXPtZe989/WFdt3rJrc9tz8prVCQAAAABDmMpVxavqkCTfTvLTrbWd/bYDk1yc5IGttf8a8jzrZ1XxJKnakuTkljz54v0OaO+51T02Xbl5n9zt3C/k1hfuygX7H/yFQy696P5p7ZtrXSoAAAAA02+c+dq0BpeV5HNJdiY5MckV/f0zk9yhtfbdBY7bL8l+A5sOTLIr6yW4nFV1691Vv33xfjd44NWbNu9/xZZ9Zw659HsvvN7VV74n0/gDBwAAAGAiCS7nUVVHJnlzkrsn2ZPkW0m2tdY+vpdjnpfkufPsWl/BJQAAAACsgnEGlxM1x2VVba+qtsjtDn2Py5ekCyuPSfLj6ULMt1XVYXu5xJ8lOXjgduR4vyIAAAAAYDkmqsdlVd00yU0WafbVdGHlO5PcaDDJraovJfnH1tr2Ia+3vua4BAAAAIBVNM58bcsoT7ZSrbVvp1t0Z6+qav/+n3vm7NqTCetFCgAAAAAs3bSGfB9M8t0kr66qu1TV7arqRUmOTrJjbUsDAAAAAFZqKoPL1toFSR6c5AZJ3p3ko0l+KslDW2ufXMvaAAAAAICVm6ih4kvRWvtokp9b6zoAAAAAgNGbyh6XAAAAAMD6JrgEAAAAACaO4BI
AAAAAmDiCSwAAAABg4gguAQAAAICJI7gEAAAAACaO4BIAAAAAmDiCSwAAAABg4gguAQAAAICJI7gEAAAAACaO4BIAAAAAmDiCSwAAAABg4gguAQAAAICJI7gEAAAAACbOlrUuYEIcWFVrXQMAAAAATJsDx3XijR5czn5jd61pFQAAAAAw3Q5McvEoT1ittVGeb6pU183y8CSXrHUtQzowXch6ZKanZsBrF6aV1y5MJ69dmE5euzCdZl+7d0jyxTbioHFD97jsv5nnrHUdwxoYzn5Ja22kCTYwPl67MJ28dmE6ee3CdPLahek08No9b9ShZWJxHgAAAABgAgkuAQAAAICJI7icLlckeX5/D0wPr12YTl67MJ28dmE6ee3CdBrra3dDL84DAAAAAEwmPS4BAAAAgIkjuAQAAAAAJo7gEgAAAACYOIJLAAAAAGDiCC4nVFUdVlXbq+rMqrqkqlpV3X8Jxz+vP2bu7fLxVQ0kK3/99uc4oqreWFXfq6qLq+otVXWr8VQMzKqqG1bVy6vq21X1g/51fPchjz11gd+9nx933bARVNV+VfXnVXVuVV1WVR+qqgcOeazfq7BGlvva9ZkW1lZV3aCqnl9Vb6+qC/vX32OXcPyy/189aMtSD2DV3D7JHyT5UpJPJ7nvMs/zpCTfH3i8e4V1AYtb0eu3qm6Q5MwkByd5YZKrkjw9yXuq6q6tte+MtlwgSapqU5IdSe6S5EVJLkjy5CRnVdU9WmtfGuI0VyT5zTnbLhppobBxnZrkuCQnp/sd+9gk/15Vx7bW3rfQQX6vwpo7Nct47Q7wmRbWxiFJ/m+SbyT5ZJL7D3vgiP5fnURwOcn+J8lNWmsXVtVxSd60zPOc3lq7YIR1AYtb6ev3yUlum+THW2sfSZKq+o8k/5vkd5M8e5TFAtc4LslPJHlEa+30JKmqNyb5YpLnJzl+iHNc3Vp77fhKhI2pqn48yaOSPLO19pf9tn9K97vxL9K9dhfi9yqskRW+dmf5TAtr47wkh7XWvllV90zykSUcO4r/VycxVHxitdYuaa1dOIJTVVUdVFU1gnMBQxjB6/e4JB+Z/XDVn/PzSd6V5JErrQ9Y0HFJzk9yxuyG1tq3k7wxyUOrar9hTlJVm6vqoPGUCBvWcel6Wb18dkNr7fIk/5jkvlV11CLH+r0Ka2Mlr91ZPtPCGmitXdFa++YyDx/J/6sTweVG8NV0Q9QuqarXVtWha10QsLC+S/2dk3x0nt0fTnLrqjpwdauCDeNuST7WWtszZ/uHk+yf5HZDnGP/JBcnuaifC+gl/TBVYGXuluSLrbWL52z/cH9/1/kO8nsV1tyyXrtz+EwL02cU/69OYqj4evbdJH+b5IPp5ts6JsnvJPnxqrrnPL84gMlw4yT7peuWP9fstsOTfGHVKoKN47Ak751n++Br79N7Of68dMPePpbuj8MPTjdE9S5Vdf/W2tUjrBU2msOy+O/G+fi9Cmtrua/dxGdamGYr/X/1NQSXq6D/S+++Qza/orXWVnrN1topczb9S1V9OMnr0n2I2r7Sa8BGsAav3+vPnmuefZfPaQMsYJmv3etnBa+91tqz5mz656r6YpI/TTdc5p+HrAe4ruW+Pv1ehbW17N+tPtPCVFvR/6sHGSq+On46yWVD3m4/riJaa6cl+WaSnx3XNWAdWu3X72X9/XxzflxvThtgYct57V6W0b/2XpxkT/zuhZVa7uvT71VYWyP93eozLUyNkb329bhcHZ9P8rgh287XjX6Uzk43ZAYYzmq/fi9M95epw+bZN7vt3BFcB9a75bx2z8uIX3uttcuq6jvxuxdW6rwkR8yzfbHXp9+rsLaW+9rdG59pYfKN7P/VgstV0K/CdOpa19GvwrY1ycfXuBSYGqv9+m2t7amqTye55zy7753kq621S1arHphWy3ztfiLJMVW1ac5E4vdOcmmSLy61jn7Rj0OSfHupxwLX8okkx1bVQXPmtbv3wP7r8HsV1twnsoz
X7kJ8poWp8YmM6P/VhoqvA1V1i6q6w5xtN52n6ZOS3DTJ21elMGBR871+k5ye5F5Vdc+BdrdP8oAkb1rN+mCDOT3JoUkePruhqg5J8ogkb2utXTGw/dZVdeuBx9dbYGXi5ySp+N0LK3V6ks1JnjC7oar2S9ez+kOttbP7bX6vwmRZ9mvXZ1qYDlV1WFXdoar2Gdg89P+rFz3/CNaBYUyq6o/6f/5okkcleWWSryVJa+0FA+3OSnK/1loNbLs0yRvSrdJ0eZKf6s/xySQ/2Vq7dBW+BNiwVvj6PTDdX5EPTPKXSa5K8ox0/+m7a2tNzy0Yg6ranOR9Se6U5EVJLkg3+f8tktyrtfaFgbYzSdJa29o/3prudfv/t3f3wXZV5R3Hv78ELINaEl+r1QGKjLZV29oOpQ61EFQEp7QUqTOCSAVfcBzeqqVUkVQrb4paYaypIAhYMAMRlRZTnSYRcdoCVRGoAjFBkCIIIZVCkJenf6x9yOnOfctN7r0nne9n5s6+Z++111p7n3Nv9n2y1nouoU1TB9gPOID2x9Xre//bLGkzJVkKHERbO/Y24C3AHsC+VfWNrsxK/HdVGilb8LPr37TSHEvybmABLQv40cAyNo54Pruq1ie5gPZzvWtVre3Om/Jz9aR9MHA5upKM++b0fqGvZNNf8p8BXgm8kLb46e3A5cCHnQ4jzbwt+fnt9r+A9nD3Wtro+JXA8VV120z0V1KTZCHt4eqPadkOrwXeU1XX9cqthf8TuFwAnA3sSXuwm0/74+zzwEer6tHZ6L/0/1mSHYAPAYcBC4EbgJOravlQmZX476o0Uqb7s+vftNLc6555dx7n8K5VtXaswGV37pSeqyftg4FLSZIkSZIkSaPGNS4lSZIkSZIkjRwDl5IkSZIkSZJGjoFLSZIkSZIkSSPHwKUkSZIkSZKkkWPgUpIkSZIkSdLIMXApSZIkSZIkaeQYuJQkSZIkSZI0cgxcSpIkSZIkSRo5Bi4lSZIkSZIkjRwDl5IkSbMkyeIkNdf9mGlJtktyZpI7kjyR5IpufyVZPLe9kyRJ0rbCwKUkSdI0JDmiC8QNvjYkuSvJ8iTHJHn6XPdxIMmOXdB07ymW37u7pjdMs8m3Au8FLgPeAnx8mvVs85LsleSqJD/uPiM/SvKVJG+a675JkiSNuu3mugOSJEnbuA8Aa4DtgV8C9gY+AZyQ5MCqumGo7N8Ap892B4EdgVO671fOQnuLgB9X1fGz0NbISnII8AXgO8DfAuuAXYFXAW8D/mHOOidJkrQNMHApSZK0Za6qquuGXp+WZBFwJfDlJL9aVQ8DVNVjwGMTVZZkHvCUqtowYz2eec8BHpjrToyAxcDNwJ5V9fPhA0meM1udSBJgh8HnUJIkaVvhVHFJkqStrKr+BfgQsDNw2GD/WGtcdlOyz0lyaJKbgEeA13XHfjnJZ5P8JMkjSW5K8tZ+e0l26Oq+pZuO/F9JliXZLckuwL1d0VOGprYv3pxrGvQ9yYuSXJDkgSTrk5yfZMeuzC7d9e0D/PpQW3uPU+cFSdaO19YY+w9Lcn2Sh5Pcn+TSJC/slVmZ5MYkv5ZkRZKHumnaf7E5922ozLwkx3X3fkP3XixJsnAKt2034Np+0BKgqu7p9WVekmOTfK9r594kX03yO0NltktycpLV3edhbZJTk/xCr661Sa5Msl+S64CHgXd0xxYk+US3/ugjSW5LcmIXMJckSRopPqBIkiTNjIu67WunUHYRbR3ILwDHAmuTPBf4V+DVwDnd/tuA85IcNzgxyXza6M5TgOuBP6dNS94JeCktaHl0V/yLwJu7r2XTvK6lwNOBk7rvj2DjNPR7u7q/D9w51NZ/TrOtJyV5H3AhcCtwAm06/r7AN5Is6BVfCHwV+C7tfnwfOCPJ/kP1TXbfBpYAHwGuob0H5wOHAsuTbD9Jt28H9k3ygilc4nndNd0BnEhbUmADsOdQmXOBDwL/ARwPrKK9D5eOUd+LgUuAr3X9/k4
XYF5FC6ZfCBzTXddpwMem0EdJkqRZ5VRxSZKkGVBVdyZZTxt1N5kXAy+rqpsHO5KcC8zv9t/X7f50kkuAxUmWdFN/D6cF8E6oquEkOKcnSVVVksuAvwNuqKqLt/DSvl1VRw7185nAkcCJVfU/wMVJjgIe3wptDdrYGfhr4P1VderQ/mXAt4F3AacOnfJ84PCquqgrdx4tiHgkcFVXZsL71p23F3AUcGhVPbkeZZIVtMDoIUy8TuUZtIDk6iTXAN8E/hn4VlU9MVTfPrQA8Cer6tih888a6stv0BIdnVtVb+uOfyrJPcB7kuxTVSuGzn0R8LqqWj7Uzvtpn8ffqqpbu91LktwFvDfJWVV1xwTXI0mSNKsccSlJkjRzHqSNTpzMql7QMsDBwFe6l88afAHLaaMCX9EVPxj4KXB2v9Kq2mS69Vbw6d7rq4FnJvnFGWhr4E9oz61Le/fibtoIzH165R8EngyadlO1/x34laEyU7lvhwDrga/12r2+a6Pfbr+ez9Km/a8E9gJOpt2vW5O8steXogVnx+vLAd22PzLyrG77+t7+NcNBy6HruRpY17uer9OC5K+a6HokSZJmmyMuJUmSZs7TgHsmLdWykg97NrAAeHv3NZZBcpfdgB90iX9mw496r9d124XAf89Qm7sDoQUpx/Jo7/WdYwRt1wEvH3o9lfu2Oy1IPN57OGmCnS54uLybpv3bwBuBdwJXJnlJt9blbsBdVXX/BFXtDDxBWy5guP67kzzQHR/W/0xBu56Xs3HN075ZSxgkSZI0FQYuJUmSZkC3ruFO9AJN4+hnex7MirkY+Nw459wwza5tqcfH2Z9p1DXeiND5vdfzurL7j9P+g73XW6uP82hBy0PHOT5eAHATVfUQbbTj1Ul+Sltbc3/Gf3/HrWqK5cbKID6PtublmeOcc8tm9kWSJGlGGbiUJEmaGW/utv3pulNxL/AzYH5VfX2SsquB302yfVX1Rx4OzMSU8a1hHW1kaV9/9OBqWtBxTVVtreDaVO7balpypGu69US3luu67fOG2tkvyTMmGHV5Oy3wuDtDyY66JE4LuuOTWQ08bQqfKUmSpJHgGpeSJElbWZJFtPUM1wCf39zzq+px4HLg4CQv7R9P8uyhl5cDzwLePUa5wQjDh7rtgs3tywxbDeyU5Mkp3EmeBxzUK7eMNorylKFrGpRPlyBoc03lvi2ljf48eYwy242RzbxfZt9xDg3Wq/zBUF/CxuzsY/Xln7rtcb0iJ3Tbf5yoL52lwO8l2W+MdhYkcVCDJEkaKT6cSJIkbZn9k7yE9lz1XGAR8BraCLgDq2rDNOv9S1ryl39L8hngZuAZtKQ8r+6+B7iQliH7Y0n2oE1HfmpX5lPAl6rq4SQ3A29McgtwP3BjVd04zb5tLZfSMm9/MckngR2Bo2lTlgfJh6iq1V1G7NOAXZJcQRuRuistyPn3wEc3s+2p3LdVSZYAJyX5TVpG8Edpox4PAY4FLpugjS8lWUNLsrR6qP4/BK7t9lNVK5JcBByTZHdaxvJ5wO8DK4Bzquq7ST4HvL0LmK4C9qBlGr+il1F8PB8BDqStr3kBLcnQU4GXAW8AdqElLJIkSRoJBi4lSZK2zAe77c9pAcHv0UbFnV9VP5tupVX1ky6g9gFaVu13AfcBNwEnDpV7PMkBwPuAN9EyVN8HfLPry8BRtAzaHweeQstgPaeBy6q6L8lBtEzZZ9JGqJ5ECwy+olf29C7oejwbRybeQQsmfnkabU/pvlXVO5NcD7wDOBV4DFhLW3/0mkmaOQr4I+BPgefTRlX+EPgwcEYvMdCf0dYtPZIWYFxPm1L+rV59PwSOoAVs76YFczfJRj7ONT+U5A+Av6IFXg+nJVS6hXZP10+lHkmSpNmSTRMuSpIkSZIkSdLcco1LSZIkSZIkSSPHwKUkSZIkSZKkkWPgUpIkSZIkSdLIMXApSZIkSZIkaeQYuJQkSZIkSZI0cgxcSpIkSZIkSRo5Bi4lSZI
kSZIkjRwDl5IkSZIkSZJGjoFLSZIkSZIkSSPHwKUkSZIkSZKkkWPgUpIkSZIkSdLIMXApSZIkSZIkaeT8L+MtdMMA3OERAAAAAElFTkSuQmCC", + "image/png": "iVBORw0KGgoAAAANSUhEUgAABS4AAALGCAYAAABLZg+zAAAAOXRFWHRTb2Z0d2FyZQBNYXRwbG90bGliIHZlcnNpb24zLjcuNCwgaHR0cHM6Ly9tYXRwbG90bGliLm9yZy8WgzjOAAAACXBIWXMAAA9hAAAPYQGoP6dpAAB79UlEQVR4nOzdd5xld10//td7d9OAZEMKJNkENhQp0kREQCMJIOI3Ii2iRqT9KNJMQJEFC0XKIgoJVZASBEEgIMWVopBARKR3pAWGZCGQhJQNpO9+fn+cMzCZzMxOuXfm3pnn8/G4j7v3nM855z0z997Z+5pPqdZaAAAAAABGybqVLgAAAAAAYDrBJQAAAAAwcgSXAAAAAMDIEVwCAAAAACNHcAkAAAAAjBzBJQAAAAAwcgSXAAAAAMDIEVwCAAAAACNHcAkAAAAAjBzBJQCrRlXtV1UvraqJqrq6qlpV3aGqju7//ayVrnG1qap7V9X/VNVF/ff43Std03RVdXpVtQGc51n913j00qsaH1W1uf+6T1npWmCqqjqlf25unrLN83WZjMLv1v76p6/U9QEYPsElACuq/9Cx5FCp93dJnpTky0lekOTZSX44oHMzTR8WvCfJkUlen+77/a+7Oebh/c/84UMvkJHT/1FhYqXrmG5KKD3X7fRpx0xMD82m7Du4qj7d739jVW2Y49qnLOS6/TF/OWX/Lebx9R1YVX/d/5Hh/Kq6qqp+XFVnVNUzquqG8/pGsSjj+r4nBAZgFMz6nygAGEO/k+SbrbX7Tt1YVbdcoXpWu3sl2TvJn7XW3rLSxczhoUmuM4DzvDxdMHvWAM41Tr6f5FZJLl7pQpbBR5OcPsu+ifmcoKqOTPKhJDdL98eULa21+fxx5j1JvrC761ZVJXlUkpakkjw6yZ/PUc/vJHlzko1Jvp3k35Kc2z/+1STPTfKMqrpZa201/KFnLT1fAWDVE1wCsJocluRjK13EGnJYf/+DFa1iN1prAwkaW2vnJzl/EOcaJ621q5J8faXrWCant9aetdiDq+oOSd6f5IZJntxaO2kBh7+7tXbKPNrdO8nmJKckuU+Sh1XVM1prV85Qz93TBZVXJ3lEkjdOD1Gr6rZJTk73R4ixt8aerwCw6hkqDsDImTo8rf/3v/bDGy+vqs/0PYimtp+cw7CS3H2uIZbTjpt12Opc8xlW1S372s6uqiur6kdV9ZaZhmxOnYOtqh5bVV/uv44fVdVrqmrjLNc/vLr5Or9VVZdV1QVV9amq+utZ2r68qr5TVVf0Q0DfW1W/MtfXP8t1H1xVH6uqi/vrfrmqnl5Ve01pc3T//X52v+m0Kd/za32/phx3epI39A/fMG047Oa+zc++71V1fFV9sqp+MvXn1A+7fGf/9V5WVTuq6uNV9ZDZrlvTpiOoKXOzVTcP6rbq5um8tKo+WlV3m+E8Mz4nJp9rVXVQ/zM9p/85fLWqHjFLTXv155v8mX23qp7bb5/3nG3TXiu3rKp398+Vn1bVf1fVvee4/pb+53tp/z08o6oePNc1pm2f93N7ynPmxkluPO1nf8qUdkdV1fuqanv/fflhVf1vVT1zPt+PlVRVx6TrsXlAkj9aYGi5EI/u7/8pyb8kOSjJA2aoZ12SV6frqHBCa+2UmXp+tta+nK739PfnumhV/WP/87rfLPt/td9/6pRtN6yqv6+qb/TPyYv6f59SVTeZ59ebqrpX//z8af/8fnfN0pN+Hs/Xm1TVk6rqS/37x+lT2hxQVS+oqv/r911cVR+e7XXUH/P7fZsL+uf/RFW9taru1O8/Pbt535vj3FNf3zetqlOre3+/pKo+VFW36dsdPOW95/Lqpik4ZpZzbqiqx/evqx396//zVfXE
/jkz2e5ZSb7bP3zYtLofPsN55/U+2rfd2H+fv9HXe2FVfbCq7jVL+z2rm+rgzJr2fjnX9w+A1UGPSwBG2Y2TfCrJd5K8KV0g8PtJ3lNV92qtnda3OyXd8M5nJvle/ziZ59DOhaiq+yR5V5I9krwv3dDLw5M8MMmxVXVMa+1zMxz6d0l+qz/mQ0mOSRdA3CzJPaZd405JPpju6/1Yf73rJLl1kmcl+dspbe/Yn++A/ph3pQsy7p/kv6vqAa21/5jn1/b8JE9P16vwLUl+kuS3kzw/yW9V1b37Xl0T6ULLo5PcPckb8/Pv9URmd0qSi5LcL9ceFnvRtLZ/luQ3032/Tks3rHXSq5J8Nd335pwkByb5f0neVFW3aK1dK9ydw52S/EWSTyR5bZIbJXlQkg9X1R1aa9+Y53n2T/LxJFcmOTXJXkl+L8nrq2pXa+2Nkw2rqpK8M8mxSb6Vbgj6HkkenuQXF1D7VEf2X8OX04VVh6Z7rby/qo5vrb1tyvX3TPdcuXu6nmmvSPf8Oi7J2/qv+xkLuPZ8ntsT6Z4zJ/aPT5py/Bf6uu6TZFuSHUnemy5IOyDdsN/H5+dB+cipqt9L9x51VZJjW2v/NaTr3DDJ76abEuN/qmpHutfKY5K8bVrzuye5Rbrv4+vmOm9rbVeSXbu5/BuTPDbd1AvvmWH/w/r7U/par5PuNXHTJP+Z7vlR6d7X75fudfKd3VwzVXVcuq/tyv7+nCS/nu75/qXdHT+Dk5Mcle659h9JdvbXuXG63yObk5yR5ANJrptuCpIPVNVjW2v/NKWuShdIPizde+a7kpyX7vfBMUm+keQzWdj73mw2J/lkkv/rz7c5XVh9elXdta91R7rvzwFJ/iDda/8XpvY4r6rJ31u/1df3liSX9/W+LN3UAX/cNz893fvaCUm+mOTdU+qZ+jUkC3gfrar90z0vbp3k0+neCw5K8uAkH6qqx7XWXj2lfSV5e7rv35np3i/3TPLIJLfdzfcNgNWgtebm5ubm5rZit3TztLVp2zZPbk/yzGn7fqvf/h+znOv0GbYf3e971rTtE0kmZqnrWf0xR0/Zdv0kF6b7kHrrae1vky7o+9y07af05zkryY2mbN+QLnhrSe48Zfue6Xq5tCTHz1DX4dPO8e10HzzvPq3dYekCi3OS7DWPn8Ndp9R5yLRrvK/f94zdfY/mcZ2H98c8fDff958m+aVZ2tx0hm17JvlwuuBo07R9p8/wHJt8TlyrlnThTEvyyvl8vVPO89ok66dsv3W6Ibpfm9b+j/v2H0uy55Tt+6cLEmd8Hs/yvdg85fovmrbvTv3348Ik+03Z/vS+/X8k2TBl+w3SvSZakrvNcI1TlvLcnsdr7p39MbefYd9B832OLeY25Wd7ev/vmW53meFraemC251JfpTklxdx7cnv47tnue7+U9pu6ds+fcq2z6QLHW827bx/3bd98wC/T99IckWSA6Zt3yvJBf33YEO/7b799V8yw3n2TLLvPK53vSQ/7p/Hd5q27yVTnvubF/B8/X6SI2e41un99/EPpm3fP11Qd1mSG07Z/pj+fJ9KsnHaMeuTHDrl8cMzx/veHF//5NfSkvzlLD/fC5L8Y5J1U/ZNvse8ZNoxz+q3vyzXfK9any7cbknut7vv5ZT9R0+p7+HT9s32Pvrqfvurk9SU7TdPNy/pFdN+nsf37T+RZO8p2w9IF2TO+/3Szc3NzW08b4aKAzDKvpdu4Yifaa19MF1QcucVqOeh6T7EPrO19rVpdX0l3dDNX6qqW89w7HPalJ4vrbWr8/Phg1O/lvum+7D43jbDgjette1THh6brjfTy1prH53W7gfpApVDktxzHl/bI/v757YpC3T0df5Zug/0j5rHeQblNa21z8+0o7V25gzbrkzXc3BD5vf1Tvp4u/a8gq9PFzgu5Dl2aZKntNZ2Tqnpa+l6Ft2qqq43pe3D+vu/alPmJWytXZQpvWkX6OIkz5m6obX2mXRDiffPNYcS
PzLdh/2n9D/fyfbnTrn+Qn7W831uz9dl0ze0bn7R5XD3dD23Z7rdZZZjnppu+qXjWmufXcK17zfLdfdPrrEoz64k/zzluFPy80V6pjq0v9+ewXljutDxD6dtv2+6P+z8y9TnVG+mn+eVrbVL5nG9+6ULqN7SP5+nelYWtwDP37XWvjt1Q1XdPt3P/p2ttX+dVutF6X4Oe6frRTjpSf39Y1trF087Zmdr7ZxF1DabiSRbp22b7MW9V5Kntq7X7KS3pHsPu8Pkhn4Y+JOS/DDd/KtT36t2pnufb0n+aBH1zet9tO/t/ZB0f+R7emutTanhW0lemu759dAp55mcbuMZrbXLp7S/IIt/vwRgjBgqDsAo+8LUD1dTnJ2uh+Bym7zm7fv5v6b7hf7+Vkm+Nm3f9A/dSfd1JN0H/kmT4cj7F1DPjWep5+ZT6tndcPE79vcfmb6jtfbNqtqe5Miq2jj9Q/qQfGq2HVV1oyRPSxdQ3ijJPtOabFrAda71c2mtXVVVP8o1fy67863W2o4Ztk/9Gf+k//cvpQuf/meG9v+9gGtO9blZgqDT0wWlv5TkjVW1b7oh3N9vrc20gMnkz/+XFnDt+T63d+df0k258Mmqelu6KQI+Pi2sn1U/BPXEGXad1IdP8/HstvDFeT6Yrif466vqHq21s6furKoT04ePU7y7tfaFadseMUP4M9U90v2h4oOttanzUb4lyT8keXhV/VXrFqcZln9OFxY9LN0fCiZNhvGnTNn20XS9G7f0U1r8R7ogf7b39ZlMvi99dPqO1trFVfWFdIHjQsz03jL5XrpxlvfSg/v7WyVJVV03XS/7H832B5YBm+l7Nrko2jenv/Zbazv797DDp2z+hXQh8LeS/FWXg1/LZem/xgWa7/voLdJNS/HxPnic7iNJ/irXfP+5Y7r3y5neG09fRK0AjBnBJQCj7KJZtl+dlVlg7sD+fnrPpumuN8O2i2bYNtkzaf2Ubfv393MulDGtnt9bRD3TbezvZ+sldE66kHD/LK6X00L9cKaN1S3o8al0H4bPSDen4sXphupuThegLGTBhotm2X51rvlzWcp5Mu1cG5NcMEPPtKQbarsYsx03+X3cOO1+rp9zcu2gbS4XzbBtpq97Tq21d1W38NafpesV+tgkqarPpuud9Z+7OcX+6XrGTXfKLDUOyuPS9bp8XJIz+vBy6tyNJ6ab13GqiVx7nsDdeUx/f8rUja21C6rqfel6A94v3dyRyc9/lgsJ8ufUWtteVR9O8ptVdavW2v9V1Q3SrW7+hdbal6a03VFVd0k3N+nvpgt3k+T8qnplut7duwtZJ5+vu3t+L8RMx0y+l/5mf5vN5Hvp/v39fN6nB+Fa77mttav78HG29+Or082dO2nya7x5Zn6dTJrP74vpLpqjhunvfcnC3n8m3y9neq4s5ucPwJgRXAKwlu1KNyxtJvvPsG3yA+Ltp35AH7CL+vv5hA2T9dyvtfbeJV538lyHpJs3bLpDp7UbtjbL9qek+wB+rd5pVfWH+XnPr1G2I8kBVbVhhvDyhos852zHHdLfXzzt/pAZ2ibL/3O+htbatiTb+h5tv5puYZTHJfn3qvql6VM0TDt2It2Q6eXWWmuPr6rL0j0/P1ZV92z9giSttc1LvUBVHZxuwa0keWtVvXWWpo/Jz4PLyR5qR1fV+gX0ctydN6YL9x6Wbs7NP0r3meKN0xv2vWX/v36Y+63T9Rp9QpK/SffHp90tpDX5PNzd83shZnpvmbzOCa21l87jHBf19wMLhZfB5Nf4b621B65wDQt5/7k43fvlHjOEl4v5+QMwZsxxCcBadmGSG/YrrU53pxm2/W9/f9TwSvrZNX57AW0HUc/kcMejp++oqpulG3L43QUMuZ3NZHiykN6MU92sv3/nDPsWOmR0pXw+3f/B7jbDvl9f5Dnv2A8Dn+7oKddMP6T0zCSbqurmM7Q/pr//3CLr2J2dmcfPvrX209baR1prT0m3qv2emd9r
YsW01v4syfPShVkfrapBrnj8sHTfg8+mW0Rlptt5Se5VVUf2x3w03WI6h+fn8wTOqKrWzfI+OJN3pQvfH9LPm/iwdD3rrjUn76TW+Wpr7WX5eY/G+8/jWpPPw2u9tqtqY6bM4bhEC3ovba39NMlX0v3+mM+0Ckt93xuEr6cLXO+ygJ/1oOv+Rrr5gG/fT+0w3UzvP59L934503vj0QOqC4ARJrgEYC37VLqeQtf4UF9VD0/yazO0f0O6D37PrKprLTrSf/g/eok1vS/dMNLf7XsQTr/G1DnL3pMuhHpCVf2/mU5WVXetquvM47qv7+//qu/dNXn8+iR/n+7/DK+b11cwtx/39zda5PET/f3RUzdW1W9leRcPWorJhVWe2y9WkeRnQczueqDNZmO6Xmw/U1V3Stcb7uIk/zZl1+vT9Ux8Uf/znWx/0JTrvz7D8eMkB1fV9HlJU1W/UVUzjQaa7G136ZBqGpjW2l8l+ct0NZ/Wz+04CJPTUzy+tfaomW7pV2lO/zroF2t5bLpQ8aVV9ZCaYWLDfjGxD2WevQdba5cleXvf/slJbp/kP/rFnaae9xeraqaekgv5eb4n3R+Yju+fz1M9Kz8ferwk/cI/ZyR5YFU9cqY2VXXbflj8pMmema/uX7tT266rqkOnbFrq+96S9b27X5auV+NLZ3kNHjptcbkL0/VQHUjd/WJk/5Jk30xbWKeqbprkT9OtIP+mKbsmF/p6XlXtPaX9AenmwwRglTNUHIC17GXpQstXVdU90y0ocod0CzX8e7phqj/TWvtxVR2XLgT6336ut6+m+2B3RH/cgelWn12U1tqVVfV76YKEt1TVY9P1Bto73aIJ90z/+7tf/OCB6RYH2VZV/5Nu3rxL+3p+JclN0n1QnTMkaK39T1X9XZK/SPKVqjo1yU/T9XK7Tbphpy9a7Nc1xSf6Wk6sqgPz8znKXjbPRX9eme5n9o6+xh/09d0nXZjy+wOocdj+OckfpKv5K1X13nRz0T0oyafTLWCxa/bDZ/SxJI+qql9NtwDKoem+F+vSrXo8deGgv0/3c71fki9W1X+kWzDj95LcIN2qy4tdJGh3PpzuefmBqvpYkiuSfLG19r50QdCmqvp4uoD6yiS/nG548feS/OuMZxyso2dZnCVJLmqtnbS7E7TWnl9VlyZ5SZKPVNV9Wmv/u7vjZtP/MeQXkny5tTbrolXp/rDwl0keUVXPbK1d3Vr7aP8e8ab+9tdVdXq63pkb0/Us/9V0r/Vrrf49hzemC0hfMOXxdL+ZLhz/RJJvJjk3Xe/P+6V7fu/2/aS19pOqekySt6WbP/Rt6eZB/PV0r/uPJfmNBdQ9l+PTLQ7zuqr60ySfTPeHqsOT3K6/3l37ryNJXpuuh+YfJ/lWVb0n3ff1sHTP2denC1eTpb/vDcrfpgua/yTJfavqI+nm6bxBurkvfy3dc+hryc++/59MclRV/Uu6n+POJO9dwnQpW9J9355YVb+SbhGug5I8OF2g+cRpq76/Nd172e+me798T7r3y+PSvV/edJF1ADAmBJcArFmtta9V1b3SDUW9b7qeSWek+3D6wEwLLvtjPlxVt0vy5+kWmzgqXcDyg3QfemcawrzQuj5TVXdI9wHvt9MNKb4kybczrVdda+1LVXX7dHPr/U66UG9Xug/3n0+3CMP587zu06rq80memOSh6T4cnpmuV8s/9L1llvq1XVhVD+rreniS6/a73px5zKvYf73HJHlukmPT/V/mi+l+XhdlDILL1lqrqgckeUa60ONJ6X5eb0wXzN4/3VDchfhuujBia3+/V7ohls9prX1w2vWvrKrfTPecOb6//tXpvo8nttZmmz9xEJ6bbv7Y+6YLSdan+7rfl+51+IB0Ydq90j2Pz+q3n9Rau3CIdU26e2afcuB7SU6az0laayf1c16+Ksl/VtXvtNautTL2PE32tnztbq45UVX/lS4wvG/6Xrattff1vdken+795Lgk+6V7T/l6uveU
10zvMbmba/13VX073dQNF6T7Q890H0zXU+830oWV+6V7nv9nkhe31v5nntc6taruk+4948Hpwu6PpXuf3pIBBZf9wkO/nO718KB0vZXXpwsZv5buD11fntK+JXloVX0w3dyiD073ujsn3e+R905pu6T3vUHp/9h1/yQP6ev4nXSL8ZyX7j3kr9P1iJzqj9OF8PdJ8ofpevVuT7Ko4LJfTOquSZ6e7n37KelC808leVFr7UPT2rf+j3lb+pqfmO57/IYkz0ly+WLqAGB8VPc7FwCAldYHih9KsrW19vR5tN+cLnB4Y2vt4cOtDgAAlpc5LgEAlllVHTbDtgPT9ZhMrjknJQAArEmGigMALL8X90P8/yfdMM3D0w3jPSDJq3czlyEAAKwJgksAgOX3rnSrK9833ZyPl6db6Ol1Gczq7QAAMPbMcQkAAAAAjBxzXAIAAAAAI0dwCQAAAACMnDU9x2VVVZLDklyy0rUAAAAAwJjaN8kP2oDnpFzTwWW60HL7ShcBAAAAAGPu8CTfH+QJ13pwOdnT8vDodQkAAAAAC7Vvuo6BA8/W1npwOemS1tqOlS4CAAAAAMZJNxPjcFicBwAAAAAYOYJLAAAAAGDkjHVwWVWbqurNVfXjqrqsqr5cVXda6boAAAAAgKUZ2zkuq+r6ST6e5LQkv53kvCQ3T3LhStYFAAAAACzd2AaXSZ6W5OzW2iOmbPvuShUDAAAAAAzOOA8V/90kn6mqd1TVuVX1+ap69FwHVNVeVbXf5C3dcu0AAAAAwIgZ5+DyJkkel+RbSX4ryauSvLSqHjbHMU9PcvGU2/ZhFwkAAAAALFy11la6hkWpqiuTfKa1drcp216a5Fdaa3ed5Zi9kuw1ZdO+6cLLja21HcOsFwAAAABWm35U88UZQr42znNcnpPka9O2/V+SB812QGvtiiRXTD6uquFUBgAAAAAsyTgPFf94kltM2/YLSb63ArUAAAAAAAM0zsHlS5LcpaqeUVU3q6rjkzwmyStWuC4AAAAAYInGNrhsrX06yQOS/GGSryT56yQnttb+ZUULAwAAAACWbGwX5xmEYU4eCgAAAACr3TDztbHtcQkAAAAArF6CSwAAAABg5AguAQAAAICRI7gEAAAAAEaO4BIAAAAAGDmCSwAAAABg5AguAQAAAICRs2GlCwAAAACA1Wbzlm3rkxyV5NAk5yQ5Y2LrsTtXtqrxosclAAAAAAzQ5i3bHphkIslpSd7S30/025knwSUAAAAADEgfTp6aZNO0XZuSnCq8nD/BJQAAAAAMQD88/OT+YU3bPfn4pL4duyG4BAAAAIDBOCrJ4bl2aDmpkhzRt2M3BJcAAAAAMBiHDrjdmia4BAAAAIDBOGfA7dY0wSUAAAAADMYZSbYnabPsb0nO7tuxG4JLAAAAABiAia3H7kxyQv9weng5+fjEvh27IbgEAAAAgAGZ2Hrsu5Icl+T703ZtT3Jcv595qNZm67m6+lXVfkkuTrKxtbZjpesBAAAAYHXYvGXb+nSrhx+abk7LM1ZjT8th5muCS8ElAAAAACzKMPM1Q8UBAAAAgJEjuAQAAAAARo7gEgAAAAAYOYJLAAAAAGDkCC4BAAAAgJEjuAQAAAAARo7gEgAAAAAYOYJLAAAAAGDkCC4BAAAAgJEjuAQAAAAARo7gEgAAAAAYOYJLAAAAAGDkCC4BAAAAgJEjuAQAAAAARo7gEgAAAAAYOYJLAAAAAGDkCC4BAAAAgJEjuAQAAAAARo7gEgAAAAAYOYJLAAAAAGDkCC4BAAAAgJEjuAQAAAAARo7gEgAAAAAYOYJLAAAAAGDkbFjpAgAAAABYnM1btq1PclSSQ5Ock+SMia3H7lzZqmAw9LgEAAAAGEObt2x7YJKJJKcleUt/P9Fvh7EnuAQAAAAYM304eWqSTdN2bUpyqvCS1UBwCQAAADBG+uHhJ/cPa9ruyccn9e1gbAkuAQAAAMbLUUkOz7VDy0mV5Ii+HYwtwSUAAADA
eDl0wO1gJAkuAQAAAMbLOQNuByNJcAkAAAAwXs5Isj1Jm2V/S3J23w7GluASAAAAYIxMbD12Z5IT+ofTw8vJxyf27WBsCS4BAAAAxszE1mPfleS4JN+ftmt7kuP6/TDWqrXZehWvflW1X5KLk2xsre1Y6XoAAAAAFmLzlm3r060efmi6OS3P0NOS5TTMfE1wKbgEAAAAgEUZZr62YZAnAwAAAIBxpyfraDDHJQAAAAD0Nm/Z9sAkE0lOS/KW/n6i384yElwCAAAAQH4WWp6aZNO0XZuSnCq8XF6CSwAAAADWvH54+Mn9w5q2e/LxSX07loHgEgAAAAC6OS0Pz7VDy0mV5Ii+HctAcAkAAAAA3UI8g2zHEgkuAQAAAKBbPXyQ7VgiwSUAAAAAJGck2Z6kzbK/JTm7b8cyEFwCAAAAsOZNbD12Z5IT+ofTw8vJxyf27VgGgksAAAAASDKx9dh3JTkuyfen7dqe5Lh+P8ukWput9+vqV1X7Jbk4ycbW2o6VrgcAAACAlbd5y7b16VYPPzTdnJZn6Gk5s2Hma4JLwSUAAAAALMow8zVDxQEAAACAkSO4BAAAAABGjuASAAAAABg5gksAAAAAYOSsmuCyqrZUVauqk1a6FgAAAABgaVZFcFlVv5LksUm+tNK1AAAAAABLN/bBZVVdL8m/JHl0kgtXuBwAAAAAYADGPrhM8ook21pr/7W7hlW1V1XtN3lLsu/wywMAAAAAFmrDShewFFX1B0numORX5nnI05M8c3gVAQAAAACDMLY9LqvqiCQnJ/mj1trl8zzsBUk2TrkdPqTyAAAAAIAlqNbaStewKFV1/yT/lmTnlM3rk7Qku5Ls1VrbOcOhU8+xX5KLk2xsre0YUqkAAAAAsCoNM18b56HiH05y22nb3pDk60leuLvQEgAAAAAYXWMbXLbWLknylanbquqnSX7cWvvKzEcBAAAAAONgbOe4BAAAAABWr7HtcTmT1trRK10DAAAAALB0elwCAAAAACNHcAkAAAAAjBzBJQAAAAAwcgSXAAAAAMDIEVwCAAAAACNHcAkAAAAAjBzBJQAAAAAwcgSXAAAAAMDIEVwCAAAAACNHcAkAAAAAjBzBJQAAAAAwcgSXAAAAAMDIEVwCAAAAACNHcAkAAAAAjBzBJQAAAAAwcgSXAAAAAMDIEVwCAAAAACNHcAkAAAAAjBzBJQAAAAAwcgSXAAAAAMDIEVwCAAAAACNHcAkAAAAAjBzBJQAAAAAwcgSXAAAAAMDIEVwCAAAAACNHcAkAAAAAjBzBJQAAAAAwcgSXAAAAAMDIEVwCAAAAACNHcAkAAAAAjBzBJQAAAAAwcgSXAAAAAMDIEVwCAAAAACNHcAkAAAAAjBzBJQAAAAAwcgSXAAAAAMDIEVwCAAAAACNHcAkAAAAAjBzBJQAAAAAwcgSXAAAAAMDIEVwCAAAAACNHcAkAAAAAjBzBJQAAAAAwcjasdAEAAADAytu8Zdv6JEclOTTJOUnOmNh67M6VrQpYy/S4BAAAgDVu85ZtD0wykeS0JG/p7yf67QArQnAJAAAAa1gfTp6aZNO0XZuSnCq8BFaK4BIAAADWqH54+Mn9w5q2e/LxSX07gGUluAQAAIC166gkh+faoeWkSnJE3w5gWQkuAQAAYO06dMDtAAZGcAkAAABr1zkDbgcwMIJLAAAAWLvOSLI9SZtlf0tydt8OYFkJLgEAAGCNmth67M4kJ/QPp4eXk49P7NsBLCvBJQAAAKxhE1uPfVeS45J8f9qu7UmO6/cDLLtqbbbe4KtfVe2X5OIkG1trO1a6HgAAAFgpm7dsW59u9fBD081peYaelsDuDDNfE1wKLgEAAABgUYaZrxkqDgAAAACMnA0rXQAAAABLZ5gvu+M5AowbPS4BAADG3OYt2x6YZCLJaUne0t9P9NvBcwQYS+a4NMclAAAwxvrg6dT+YU3ZNflhb82sCq1H4cw8R4BhsjjPkAguAQCAcdYHdRNJNuWa
gdSklmR7kiNXe4DXh3MnJzl8yubtSU5Yy6Gc5wgwbBbnAQAAYCZHpQvqZgqk0m8/om+3ak3pUbhp2q5NSU5d48OhPUeAsSW4BAAAGF+HDrjd2Ol7FJ7cP5wezk0+Pqlvtxat+ecIML6sKg4AADC+zhlwu3E02aNwNlN7FJ6+HAUN0yLm8fQcAcaWHpcAAADj64x08xPOtnhBS3J23261WjM9Che5MrjnCDC2BJcAAABjqu9pd0L/cHowNfn4xFW+6Mqa6FG42Hk8PUeAcWZVcauKAwAAY26WFbXPThdIreoVtcdt1exFDPUeyNe4lp8jwHBZVRwAAIBZ9cHT5iTHJDm+vz9yLQRS49SjcJFDvZMBrAy+lp8jwPjS41KPSwAAgLE36j0Kpwz1Tq4ZQE5+KD9utjo3b9n2h+mCzt05fmLrsW9dfJUAC6fHJQAAAMxhlHsU9kO9T+4fTu81Ofn4pL7dTNbEPJ4A021Y6QIAAABgEPrh4Kcvx7UWOFfl5FDv2Uwd6n36DPsnVwbf3RyXVgYHVhU9LgEAAGABFjFX5WHzPPWhM20cp3k8AQZpbIPLqnp6VX26qi6pqnOr6t1VdYuVrgsAAIDVa8pclZum7dqU5NTp4WX/+KR5nn7Wod79kPfjknx/2q7tmWN+TIBxNraL81TVB5L8a5JPpxvy/vwkt0ly69baT+d5DovzAAAAMC/98PCJ7H7I9pETW4/dOceCPHMeN48a5jtEHWDohpmvjW1wOV1VHZzk3CR3b619bJ7HCC4BAACYl81bth2dblj47hyTbr7Jicweck7a7ariC6hPqAksO6uKz8/G/v6C2RpU1V5Vtd/kLcm+y1MaAAAAq8CMc1DO0m5yQZ65QsskOS+DCS0XOu8mwMhbFauKV9W6dHOGfLy19pU5mj49yTOXpSgAAACGbpl7Gc46B+UM7eYbcj55QKHlqTPsmpx30xyYwFhaFUPFq+pVSX47ya+31rbP0W6vJHtN2bRvunlEDBUHAAAYM31gd3K6no2Ttic5YT5B3UJDz4XMcdmfd17Dyie2Hnv6PNotuSbDxoFhMFR8DlX18iS/k+SYuULLJGmtXdFa2zF5S3LJshQJAADAQC10de9Zjp/IAoZW98HfCf3D6b2AJh+f2Lc7I11gOFtvoZbk7L7dUuxuSHolOaJvBzBWxja4rM7LkzwgyT1aa99d6ZoAAAAYvr6X4cn9w+mB3eTjk/p2Mx2/6NCz78l5XJLvT9u1PVPmqlxgyLkUC5l3E2CsjG1wmeQVSR6S5Pgkl1TVIf1tnxWuCwAAgOFadC/DpYaeyc/Cy83pVg8/vr8/cvrw9PmGnEu0kHk3AcbKOC/O87j+/vRp2x+R5JRlrQQAAGBELPNiNStlKb0MJ0PP2UwNPU+frVH/PZ11/5R279q8Zdt7MryfyeSQ9N3NcbnUIekAy25sg8vW2mx/WQMAAFiTZlusZvOWbfNarGaMLKWX4bIPrZ5vyLnYc2/esu2EdEPfW64ZXg5ySDrAshvnoeIAAAD0lrpYzZhZysI3q25o9TINSQdYdoJLAACAMTeIeRvHyRIXvlmu1b6X1Xzn3QQYJ9XabO/Vq19V7Zfk4iQbW2s7VroeAACAxdi8ZdvRSU6bR9NjJrYee/o8zznyc2XOMjT+7HSh5ayB3ZTeqcnMQ6v1UgSYp2Hma3pcAgAAjL+BztvYB3sT6cLQt/T3E6M23HyxvQwNrQYYD3pc6nEJAACMuUH2uFxLvRHHoVcpwKgbZr4muBRcAgAAY64P4CbSLcQzfY7LpAsdt6frjThrMDeo8wCwdhgqDgAAwKyWuFjNVEelmy9yptAy/fYj+nYAMFSCSwAAgFVgQPM2DnSuTABYig0rXQAAAACDMbH12Hdt3rLtPVn8vI3nDLjdUJibEmBtMMelOS4BAACSjMccl/3iQSenG9I+aXuSE1bLokEA48QclwAAAAzdAOfKHIopK55vmrZrU5JT+/0ArBKCSwAAAH5m
QHNlDlzfG/Tk/uH03qCTj0/q2wGwCgguAQAAuIY+nNyc5Jgkx/f3R67wUGwrngOsMRbnAQAA4Fr64eCnr3QdU1jxHGCN0eMSAACAcTAWK54DMDiCSwAAAMbBQUnmWhSoJTk7yRnLUw4Aw2aoOAAAACOtXy387Zl9fssVX/EcgMHT4xIAAICRNW018dnsSvLgFV48CIAB0+MSAACAUTa5mvhc1ic5fxlqAWAZ6XEJAADAKLOaOMAaJbgEAABglFlNHGCNMlQcAACAUXZGku1JNmXmxXlav//jm7dsOzpdz8tzkpxhoR6A8abHJQAAACOrDx9P6B+2absnH781yXeSnJbkLf39RL8aOQBjSnAJAADASOtXCz8uyfen7dqe5EVJnpquR+ZUm5KcKrwEGF/V2vQ/WK0dVbVfkouTbGyt7VjpegAAAJjd5i3b1qdbZfywJDdIcl6SlyQ5KHMPIz/SsHGA4RhmvmaOSwAAAEbelNDyd5M8JMnB8ziskhzRH3f60IoDYCgElwAAAIy0frj3yUkOX+QpDh1gOQAsE8ElAAAAI6sPLU9d4mnOGUQtACwvwSUAAABDNWWY96HpQsQz5jPnZH/cyf3Dmeaw3J3JOS7PWMSxAKwwwSUAAMAYW2wouFxmGea9ffOWbSf0q4XP5agsfnj45Eq0J47S9wOA+Vu30gUAAACwOH0oOJHktCRv6e8n+u0rbsow703Tdm1Kcuo86lzK3JTbkxw3j3AUgBEluAQAABhDAwgFh2o3w7wnH5/Ut5vNQuambEnOTfJHSY5JcqTQEmC8GSoOAAAwZuYRCrZ0oeB7BjFMepHD0Xc3zLuSHNG3O32WNmek6zm5KXPPcTk5LPxxwkqA1UOPSwAAgPEzGQrOFuZNDQWXZAnD0ec7zPtBm7dsO3qmnpd9OHpC/7BN3z+FYeEAq5DgEgAAYPzMNxRcyhyRSx2OPt9h3k/MHGFoH0Yel+T703adm+QlMSwcYNVaUnBZVXepqqdX1Uuq6ub9tutU1R2r6nqDKREAAIBp5hsKLmSOyGsYwByVk8O85+opOdWsYWgfSm5OF1Ie398fNrH12KdMbD32dKuGA6xOi5rjsqr2TPKvSe6Xn8+f8r4k30qyK8mH0v3l63mDKRMAAIApdjf3Y+v3n7GEayxpjsqJrcfu3Lxl2wnpemy2Weqcfr4kefVMc3P2j691HQBWr8X2uPzbJL+T5HFJbpEpv4Baa5cneUe6UBMAAIAB283cj5OPT1xiT8QlD0efY5j3XA5K8owFtAdglVpscPmHSV7VWntNkgtm2P9/SW6y6KoAAACY0xyh4KAWqpnvMPMbzjFcfPow75fP85wnzHVOANaGRQ0VT3KDJF+eY//OJNdZ5LkBAACYQR/mHZWul+M5Sd7T36ZuO2NAcz7ubjj6pJck+bPNW7adMFtYOjnMe/OWbUm3GM/uHJhZhqADsHYsNrg8O8kt59j/a0m+vchzAwAAME2/aM3Juea8k9uTzBoY9sdNDzt3G2xu3rJtjyRPSPKl/nq7m6NycmGd3fX0PCPdqL0D5rp+b0krogMw/qq1+S7wNuWgqmcneUqSeyf5ZpLzktyztXZaVT06yauSbGmt/f0gix20qtovycVJNrbWdqx0PQAAADPpQ8tT+4dTA8TJD3QzBoZzhJ2vSdfZ5FpB5uYt216Y5M+SLHSo9uSCQEfOFYxu3rLtr5M8Zx7nO2Zi67GnL7AGAJbZMPO1xc5x+bwk/5PkY0lOS/cL6iVVdVaSVyf5QLrhAgAAACxB32Py5P7h9F6Pk49Pmj4n5JSwc9O0Yw5PFxy+Jd3nuYm+7WRo+RdZeGg5WcvkKuNzeX6SH8+xv6Ub5beUFdEBWAUWFVy21q5Mcp8kj0jynSRfT7JXumEED09y39baIOZUAQAAWOuOShc2zjZU+1qB4W7Czukmh3k/OF1Py6Wac4h33xvzMekCymGtiA7AKrDg4LKq9qmqFyf5ndbam1tr92+t/WJr
7Vattd9prf1zW8z4cwAAAGYy37kep7bbXdg51WSbf8zielpOt9vVyJdhRXQAVoEFB5ettcuSPDbJDQdfDgAAANPsNgicod1CF7apJNefZ9vZOqosaIh3H05uTnJMkuP7+yOFlgBMWuyq4p9NcptBFgIAAMCMzkjXE3FTZu5BObkoztTAcL5h52LMVkOywCHefdvTB1EUAKvPYlcVv2OS/0jyV0lOaa1dPejCloNVxQEAgHEwn1XFk7wn3RDxQ5PcPMmzF3GpXVncWghnpwst9ZYEWGOGma8tNrj8UpKD0g0XvyLdvCSXTWvWWmu3X3KFQyS4BAAAxkUfXp6cbu7KSWcnObH/9/R9CzHZa/NtSf58gcedl+Twia3HXrXIawMwxoaZry1qVfEkFyT5RpKPJflkul9wP552u2AQBQIAADD7nJD97lPTDSVfjMneLP+U5HNJ3ppkvsO9K8kNkvzaIq8NALNaVI/L1UKPSwAAYJxt3rJtfZKJzD7/5XxdkmTfKY+3J/nPJIcl+a15HH/8xNZj37qE6wMwpkaxxyUAAAAr76h0w8OXElom1wwtky4IfXiSj8/z+GEuBgTAGrXYVcVTVeuTPCTJsUlu3G/+XpJ/T/IvrbV5ryQHAADAohw6pPNWuiHkj87CVzQHgIFYVHBZVRuTfDDJr6QbUvCdftdvJnlQksdV1W8Zfg0AADB4/RDxo5LcaoiXqSRHJPmbdCuUt8y8ovmJE1uP1XEFgIFb7FDx5yX55SRPSnJwa+2OrbU7ppuU+YlJ7tS3AQAAYID61cUnkpyW5K+X4ZLfTnJcku9P2749yXH9okEAMHCLDS4fkOSVrbVXttaumtzYWruqtfaqJK9K1/MSAACAAelDy/muIN7y816RS3HObCuaCy0BGKbFznF5YJJvzLH/60kOWOS5AQAAmKYfHn5y/3A+i/FsT/KUJC9Jt4DPQl1j/sp+OPjpizgPACzKYoPLbyf53SSvnGX/7yY5c5HnBgAAWLOmzF95aLrVus/oQ8O7Z34B5N8m+cjkcZu3bNuVrpdmMnvgaf5KAEZOtbbwkQNV9fgkL0/ygSQnJflmv+sWSf40yX2SPLEfNj6yqmq/JBcn2WghIQAAYKX1Q8FPzjUDyu1J3pLkUZnfyLbjJ7Ye+9Zp5z0uXceTg6dsPjfJE5LsmuGaZ6cLLQ0FB2BOw8zXFtXjsrX2yqq6QZItSX5r2u6rkjxn1ENLAACAlTRDz8qDkrx9hqabkvzFAk59zrTrPDDdcPGpoeV5SZ4wsfXYU/s278nMvTwBYMUsqsflzw6uOijJvZLcuN/0vST/1Vo7fwC1DZ0elwAAwEqYpWflznQLqM5n/sqZTM5JeeRk6DhlMZ9k5qHgVgUHYEmGma8tKbgcd4JLAABguc0RJi7FtYLIvkfnRLoemzNd51pBJwAs1MgNFa+qeyW5R2vtGbPsf16SD7fWPrKU4gAAAMbR5i3b9kg3f+RN0y1c+oqJrcdetYiVwefrgiSPmdZ78qjMvZhPJTmib3f6AGsBgIFY7Krif53krDn2b0ryV+lWsgMAAFgzNm/Z9sIkf5Zk/ZTNf795y7YXJ/lB5rcy+EK9dIYh34fO89j5tgOAZbVukcfdNskn59j/6SS3W+S5AQAAxlIfWv5Frhlapn/81HSL5AzDs/oh6FOdM2PLa5tvOwBYVosNLvdKsudu9l9nkecGAAAYO/3w8D9bwRJO6oeiTzoj3RyWsy1s0JKc3bcDgJGz2ODyK0keMNOOqqokD0zytcUWBQAAMIaekGv3tFwuU+erTJL0C+6c0D+cHl5OPj7RwjwAjKrFBpcvS/JrVfWOqrptVW3ob7dL8o4kd+3bAAAArBU3HeK5Z+s1Od015qvs5708Lsn3p7XbnikrkAPAKFrU4jyttTdX1U3TLdLzwCS7+l3r0v1CfW5r7Y2DKREAAGA09EOxj0oXEP6o33zDdPNEfmeIlz4vyQ3m0e5a81VObD32XZu3bHtP
fl73OUnO0NMSgFFXrc33D3czHNyFlw9IcpN+05lJ3t1aO3MAtQ1dVe2X5OIkG1trO1a6HgAAYHT1i9+cnNlXBd+e5LAsfmTbTP42yUeSfDxdMLop3bDw6Vp//SMFkgAsp2Hma0sKLsed4BIAAEh+trDOE5P8epKfJHlTktMmQ8A+tDy1bz5TcJh04eFs+xbqWkHkHDVMfqgz9BuAZTfywWVV3TLJ76UbdvD1JKcsVxBYVU9I8tQkhyT5YpIntdY+Nc9jBZcAALDGbd6y7UVJnpJr95S8JMnDk7wnyURm7+04DC3d3JTTh3gflOQluWavz7PTLbIjtARg2Q0zX5v3HJdV9cQkf5rkbq2186dsv2+6BXn2nNL8T6vqLlPbDUNV/X6SFyf5kySfTHJikg9W1S1aa+cO89oAAMD46ueqPDrJv6YLA2eyb7oejs/M7MPDh+WZ/f3EtGtvT/LkJOfHfJUArHLz7nFZVR9KsrO19ttTtm1Itzrd9ZI8Pslnkhyb5HlJXt5ae/LAK75mTZ9M8unW2hP7x+vS/bXxZa21rTO03yvJXlM27ZvuF78elwAAsAb0geVf9rc9d9N80iXpPjssp5ek65iRGBYOwAgbZo/LhUwafesk/ztt2zFJDk7yktbaG1trX22t/V2Styf5fwOqcUZVtWeSX07yX5PbWmu7+sd3neWwp6f7Rk7etg+zRgAAYHT0c0RemOTZmX9omSx/aJkkD+nvpw9Nn3x8Uh/CAsCqtZDg8sB0vRmnume6v/j927TtH09yoyXUNR8HJVmf5EfTtv8o3XyXM3lBko1Tbss93AMAAFgBfWj5ziw+hNyRn/d2HKaW5Nx0HURmm0+zkhyRbu5LAFi15j3HZWYOBI9Kcmm6RXGmurK/jZTW2hVJrph8XLVc82oDAADLre+ReFSSw5KctMTTfTDdYjmDXDl8uslg9F/SzWO5O4cOqQ4AGAkL6XH5mSQPq6p9k6SqfjHJnZN8sLV29bS2t8zwh2Gfn2RnkhtO237DJD8c8rUBAIAR1vewnEhyWrog8OAlnvIf0wWX31/ieeayK8mDk7x3nu3PGWItALDiFhJcPjvJjZN8q6o+nG44eEs3/Hq6ByT5n6WXN7vW2pVJPptuuHqSny3Oc88knxjmtQEAgNHVh5anJtk0oFOen+Sj/WI4m9N95rggsw8db+nm1L9ggddZ31/rjHQdQeY6/9l9OwBYteYdXLbWvpzkHunCwsPSLdTz/1prn53arqqOTjd8/B0Dq3J2L07y6Kp6WFXdKsmrklw3yRuW4doAAMCI6YeHn9w/HNSQ7sdObD12Z5JMbD1258TWYz+S5NH9vunh4uTjRya5QboFTY9P8rfzvNah/bVO2M35T5ysCQBWq2ptOeaXHp6qemKSp6abf/MLSf60tfbJeR47tOXaAQCA5TFlLstD000d9ZIBnfr8dKHlu2a57gPThaRTF/08O12o+K5pbY9ON2x9d46Z2Hrs6Qs9PwCslGHma2MfXC6F4BIAAMbb5i3bjkvyyix9DsupLk1yvySnzdSrcVpQ+qN+8w3TzTl5xhzHTKQbvj5TT9CWbnj4kVOPn3atWc8PACtFcDkkgksAABhfm7dse2GSvxjgKSc/HB23wF6W25OcMEMvy+mh40FJ3t7vnhpeTl73mUm+HQElAGNEcDkkgksAABhPfU/LQc+rP+cw7CmL/iQzB48/CzznCDjfkm7Oy6nbz+/Pd+C0ttcKQwFg1Aguh0RwCQAA46fvyXhOBjc8/OVJ3pk5ejkuZKh3umHmcwWcD04XVh6a5GZJnj1H21l7fwLAKBhmvrZhkCcDAABYBkdlsHNavnNyQZzdXPPwOfZXkiOS/GmSv8rM4WalCyRfnC7gTLowNDO0n2x70uYt294zSsPGzbsJwHJZt9IFAAAALNChC2h7UX7ee3G6lm54+BkDvOaLkxwwx/7JgPOo/DwMnSnknN52JPRD4CfSrZD+lv5+ot8OAAO16OCy
qvarqi1V9cGq+nxV3bnffkBVPaWqbja4MgEAAH7mnHm2OzfJo/t/Tw8vJx+fOM/egvO95nwdmvmHoQsJaodmyhyfm6bt2pTkVOElAIO2qOCyqg5P8vkkz0n3F8LbJblekrTWLkjy2CRPGlCNAAAAU52Rbj7J3U3Y/4SJrceemuS4JN+ftm97FjZ/5HyvOV/nZP5h6KBD0wXrh4ef3D+caVh70g1rX798VQGw2i12jssXJdk3yR3S/RXz3Gn7353kdxZdFQAAsCbNZ/7Eia3H7ty8ZdsJ6Xr/tcw81Prv+tAyE1uPfdfmLdveM9d5d3fdeV5zPiYX8Zkcnr49u1/wZz5D2YdtvnN8HpXk9OUoCIDVb7FDxe+d5KWtta9l5r84fifdLy0AAIB5Wcj8iX1PyZl6Up6b5Pcmth77tGntd05sPfb0ia3HvrW/nxpazuu6c1xzvq4xPL2v4YRp+2Zsu8jrDdJYDWsHYHVYbHC5T5Lz5ti/7yLPCwAArEGLmT+xDxI3JzkmyfH9/WGTPS2Hcd0Zrvnk+V4rMwxPnyMMXehQ9mEbm2HtAKwe1drCp2ipqs8k+UZr7Y+q6sB0Iea9Wmsf6ff/d5KdrbW7D7TaAauq/ZJcnGRja23HStcDAABrUT9MeyK7HzJ9ZP94zqHkw7jubNeY5zkuSPLgJB/dzXkG8nUNwyC+VwCsTsPM1xY7x+VJSd5YVV9K8o5+27p+JfFnJrlrkgctvTwAAGANmO/8ic9I8phpbbdv3rLthEX2TFzyvI27mftyspfIYya2HvuRuQrpw74ZrzEK5vl1jsqwdgBWiUUNFW+tvTnJ3yR5bpJv9ps/kOQbSf4gyTNaa+8eRIEAAMCqN995EZ+dBQwlH+B152w3RsO9l2StfJ0AjI7FznGZ1trzktw0yVOTvCrJPyV5WpJbtNZeOJjyAACANWAh8yJOH6Y8+fikfjjzMK6723azzLd55GoL89bK1wnAaFjsUPEkSWvtrCQvGVAtAADA2nRGul57s82fOGm2fbsd0r3I607O23jGfE426sO9B2WtfJ0ArLxF9bisqjtW1ePn2P/4qrrDoqsCAADWjD4Ie3LmDi3nY75Dv6de94T+4fRVS83bCAArbLFDxZ+X5F5z7L9HuvkvAQAA5uP8AZxjIUPOk8w5b+N5SR5sCDQArJzFBpe/nLmHS5yR5E6LPDcAALD2LKi35DQtydmZ55Du6fpw8snpwspJN0jykkUu+gMADMBi57jcN8nVc+zflWTjIs8NAACMiX5BnKPSBY/nJDljkUOr59tbsuWaQ8qXPKS7DyffPsOuyRXLrZgNACtgsT0uv5Xk3nPsv0+S7yzy3AAAwBjoA7+JJKcleUt/P7HIXoqTC+VMn2tyUks3nHz6kO7tSRYdLPbB68n9w0GuWA4ALNFig8vXJTm2ql5cVftPbqyq/avqJemCy9cNoD4AAGAE9eHkqel6JU412UtxQeHlPBfKeWySzUmOSXJ8f3/kEntDHpXk8MxvxXIAYBlVa7P9QXOOg6oqyeuTPCzdsPAf9LsOSxeGvinJw9tiTr6Mqmq/JBcn2dha27HS9QAAwDjoex9OpAspZwr8WrqekEcudPh2H3ienC5MnHR2uqHgAx+uvXnLtj9M11t0d46f2HrsWwd9fQAYd8PM1xbV47J1HpHknkn+MclX+turktyjtfawUQ8tAQCARRtaL8U+nNycwfaqnMt859Zc8IrlAMDSLHZxniRJa+20dPPYAAAAa8d8VwBf1ErhfS/N0xdz7CJMzq25u96ji1qxHABYvCUFlwAAwJo0716KA1x1fCgmth67c/OWbSekm69z4CuWAwCLt6ih4tV5bFV9qqrOr6qdM9yuHnSxAADASJjPCuBnJzkog1t1fGj6YejHZcArlgMAS7PYxXlelOQpSb6Q7j8tF87UrrX27KUUN2wW5wEAgMWZsqp4MnMvxRcleeoc+0cuEBz13qEAMIqGma8tNrg8N8nprbUH
D7KY5Sa4BACAxZtjBfCnJHlJhrDqOAAwWkZuVfEk+yT5r0EWAgAAjJfZVgBPcn6GtOo4ALB2LHZxng8n+ZUkrxlgLQAAwJiZaQXwzVu2DXXVcQBgbVhsj8vHJ7lLVT2jqg4cZEEAAMDYm/eq40OtAgAYa4sNLr+R5CZJ/jbJuVX106raMe128eDKBAAAxsh8Vx0/Y9kqAgDGzmKHir8zs/8nBAAAWMMmth67c/OWbSekW3W8ZeZVxU+0MA8AMJdFrSq+WlhVHAAAhmeOVcdP7Bf2AQDG3DDzNcGl4BIAAIZm85Zt69OtHn5oujktz9DTEgBWj5EMLqvqRkmekeSYJDdIcr/W2seq6qAkf5PkDa21zw+s0iEQXAIAAADA4g0zX1vUHJdVdet0E2mvS/LJJDebPFdr7fyq+vUk103y/w2oTgAAAABgDVns4jx/l+SiJHdJN7n2udP2b0vy+4svCwAAGAWGegMAK2XdIo/7jSSvaq2dl5lXFz8ryaZFVwUAAKy4fnGdiSSnJXlLfz/RbwcAGKrFBpfrklw6x/6Dk1yxyHMDAAArrA8nT821OyRsSnKq8BIAGLbFBpefS3LsTDuqakOSP0jyv4stCgAAWDn98PCT+4c1bffk45P6dgAAQ7HY4PIFSe5TVa9Kcpt+2w2r6l5JPpTkVkm2DqA+AABg+R2V5PBcO7ScVEmO6NsBAAzFohbnaa29v6oenu6vsI/pN7853X9gdiR5aGvtYwOpEAAAVqkRXvjm0AG3AwBYsMWuKp7W2puq6l1J7p3kZul6b56Z5IOttUsGVB8AAKxK/RyRJ6fr2Thp++Yt206Y2Hrsu1aorEnnDLgdAMCCLTq4TJLW2k+T/NuAagEAgDVhysI3000ufHPcCoeXZyTZ3tcz03Dx1u8/YzmLAgDWlmqtLfygqhvNp11r7awFn3wZVdV+SS5OsrG1tmOl6wEAYPXrh4dPZPeh4JErOWx8Wrg6tc7JDxArHa4CACNgmPnaYhfnmUjy3XncAACAaxqLhW/6UPK4JN+ftmt7hJYAwDJY7FDxR+bnf2mdtD7J5iQPTXJuklcsviwAAFi1xmbhm4mtx75r85Zt78loLiAEAKxyi11V/JTZ9lXVC5N8MsnGRdYEAACr2VgtfNOHlKevdB0AwNqz2KHis+oX7HlDkicP+twAALAKTC58M9tk8y3J2bHwDQCwxg08uJxy3kOGdG4AABhbfQ/GE/qH08PLyccnGo4NAKx1Aw0uq2q/qvqdJE9N8vlBnhsAAFYLC98AAOxetTbbCJU5DqraldmHtlSSs5Lcv7X2hcWXNnzDXK4dAAB2Z/OWbetj4RsAYIwNM19bbHD5rMw8rOXCJGcm+VBr7eolVzdkgksAAAAAWLyRCy5XC8ElAADA2qGXM8DgDTNfG9biPAAAADAyNm/Z9sAkE0lOS/KW/n6i3w7ACJpXj8uqev0izt1aa//fIo5bNnpcAgAArH59OHlq/7Cm7Jr8QGxRLIBFWvGh4lU1kdkX45lNa63dZDFFLRfBJQAAwOrWDw+fSLIp1wwtJ7Uk25Mcadg4wMINM1/bMJ9GrbXNg7woAAAALJOjkhw+x/5KckTf7vTlKAiA+ZlXcFlVn0vyjNbaB/rHD03ysdbaxBBrAwAAWBKLsZDuZz/IdgAsk/kuznO7JAdNefyGJHcbfDkAAACDYTEWeucMuB0Ay2S+weX3ktyrqtb3jysLn/MSAABgWUxZjGXTtF2bkpwqvFxTzkg3h+Vsn2FbkrP7dgCMkPkuzvPUJC9MsjPJZUmum+SKJFfPcVhrrW0cRJHDYnEeAABYfSzGwnRWFQcYnlFYnOdFVfXFJMckuWGShyX5dJLvDLIYAACAAbAYC9cwsfXYd23esu24JCfnms+N7UlOFFoCjKZ5BZdJ0lr7UJIPJUlVPTzJq1trbxlSXQAAAItlMRaupQ8v3xOLNQGMjXkHl1O11uY7NyYAAMBysxgLM+pDytNXug4A5mdR
weWkqto3yY2TXD8zzB3TWvvYUs4PAACwCJOLsexujkuLsQDACJvX4jzXOqjqoCQvS/KgJOtnapJucZ6Z9o0Mi/MAAMDqZDEWAFgew8zXFhtcvivJfZO8NN1fKS+cqV1r7aNLqm7IBJcAALB69eHl9MVYzo7FWABgYEYxuPxJkle21v5ikMUsN8ElAACsbpu3bFsfi7EAwNAMM19b7ByXlyaZGGAdC1JVm5P8dZJ7JDkkyQ+SvDnJ81prV65UXQAAwGixGAsAjK/Frg7+5iQPGGQhC3TLdLU/NskvJnlykj9J8vwVrAkAAAAAGJDFDhW/W7rFec5L8pp088Rca7hFa+1zSy1wATU9NcnjWms3WcAxhooDAAAAwCKN4lDx/57y79+cYX+lW61vOVcV35jkgrkaVNVeSfaasmnfoVYEAAAAACzKYoPLRwy0iiWqqpsleVKSP99N06cneebwKwIAAAAAlmJRQ8WHpaq2JnnabprdqrX29SnHbEry0SSnt9YetZvzz9TjcnsMFQcAAACABRvmUPFRCy4PTnLgbpp9Z3Ll8Ko6LN0Kgf+b5OGttV0LvJ45LgEAAABgkUZijsuqesoCz91aay9Z4AHnpVvwZz71bEpyWpLPJnnEQkNLAAAAAGB0zbvHZVUtNBhsrbWhLM7Th5anJ/lekodlyormrbUfLuA8elwCAAAAwCKNRI/LJEcO8sJL9JtJbtbftk/bV8tfDgAAAAAwSCM1x+Vy0+MSAAAAABZvmPnaukGeDAAAAABgEASXAAAAAMDIEVwCAAAAACNHcAkAAAAAjBzBJQAAAAAwchYUXFbVYVV12DzaHLq0sgAAAACAtWzewWVV/XKSs5L8wW6a/kGSs6rqtkspDAAAAABYuxbS4/IJSb6Z5CW7afeSJN9I8qeLLQoAAFhlqvZMVa10GQDA+FhIcHlMkre31tpcjfr970hyz6UUBgAAjLmqW6fqFam6MMkVSa5I1XtS9VtCTABgdzYsoO2hSSbm2fasJHPOhQkAAKxiVY9O8o9Jzt1Z9Y+n3+ROSXLEnc/+yl32vfKyDyR5U6oemdauXtE6AYCRtZDg8qdJDphn2+snuXTh5QAAAGOv6n5JXpPkFbf/07d+7OJ99v2HJIcnSVrLcV/+rwv+7v0n/9G65MIkJ6xgpQDACKvdjPz+ecOq05KktXbMPNp+JMm61trRS6puyKpqvyQXJ9nYWtux0vUAAMDY64aAfyHJD2/y1Pe8ete69adO7pnSqj32f0/N0z56SluXHJHWfrDsdQIAAzHMfG0hc1z+c5K7V9WT5mpUVU9Mcvckb1xKYQAAwFi6S5LbXbF+j5fsWrf+5H7b9Pks6y13+O1cvmGv2lnrHrXM9QEAY2IhweUbk3wwyUlV9f6q+qOqul1VHdnf/1FVvT/JyUn+M8kpQ6gXAAAYbXdIsvO2J77tinTDw2dchOeSva9bnzn81nXeda9vUU8AYEbznuOytbarqh6Q5O+TPCbJvac1qSQ7k7w6yZ/tbvVxAABgVaok7ep16w/ZXcNdtS6Xb9hzn2WoCQAYQwvpcZnW2uWttScmOTLJY5OclOT1/f1jkxzZWnt8a+2yAdcJAACMh68l2fCXp73+4Lka7XPl5bnjD76en+y5z9eXqS4AYMzMe3GeBZ206qAkf9Bae/nATz5AFucBAIAB6xbn+b9dyf/d5C/ed6dUbcoMw8Uf+rl/b8/6z1fXxftc76bXv3THd5a/UABgEEZlcZ45VdV1qur4qtqW5Pvp5roEAADWkq5nxHPWJfd/x7887X/TdZS4Rm+Jo8/8TPvLj7yuvn3g4R8RWgIAs1lSj8uqWpfkt5L8UZL7JblOkm8neW+S97XWPjaIIodFj0sAABiSqr9I8sJL9tznB//4q8dd9+s32LzxgEt35AFfPS13O+tL2b7fDT59+I5zj0prV6x0qQDA4g0zX1tUcFlVd0kXVj44yUFJvpfkxkke01p73SALHCbBJQAADFHV
3ZI8qSUPqmSPJLlw732/fL0rL33hHrt2/mta27nCFQIASzTMfG3eq4pX1S3ShZXHJ7lJkjOT/FOStya5Isk3k1w4yOIAAIAx1tr/JPmfqto7yfWT/OT6l+24ZIWrAgDGxLyDy3SrA/4wXVD5ttbapyd3VNVNB10YAACwSrR2eZJzVroMAGC8LGRxnqvS/ZX0xkmOqKq9hlMSAAAAALDWLSS4vGGSP01ycJJ3JDm3qv65qu6Tfr4aAAAAAIBBmHdw2Vq7uLX22tba0Uk2J3l+ktsn+Y8kn0rSktyyqvYcQp0AAAAAwBqyqFXFr3GCqtsleUiSP0hyeJKfJPnPJO9trb1xyRUOkVXFAQAAAGDxhpmvLTm4vMbJqo5OF2I+MF2x6wd28iEQXAIAAADA4g0zX5v3UPGq+kFVPWDK4z2r6qFVdcPJba2101trj0pySJLjBlkoAAAAALB2LGRxnkOS7DPl8b5J3pDkF6c3bK1d2Vr7tyXWBgAAAACsUQsJLmdSA6kCAAAAAGCKpQaXAAAAAAADJ7gEAAAAAEbOhgW2f2hV3aX/995JWpInVtX9Z2jbWmsnLKU4AAAAAGBtqtba/BpW7VrguVtrbf3CS1o+w1yuHQAAAABWu2Hma/PucdlaM6wcAAAAAFgWwkgAAAAAYOQMJLisqv2q6vVVdctBnA8AAAAAWNsG1eNynyQPS3LYgM4HAAAAAKxhgxwqXgM8FwAAAACwhg1tjsuqutWwzg0AAAAArG7zDi6r6rVz7L4yyUeTXNi3/ZUkH1taaQAAAADAWrVhAW0fWVXVWvv/pu9orV2Y5JgkqapjkrwnyWWDKREAAAAAWGsWMlT8r5I8oqpeP1uDqrpfkm3pel7+xhJrAwAAAADWqHn3uGytPb+qdiV5flWtS/KI1lqb3F9VD0vy2iTfTvKbrbXtA68WAAAAAFgTFjJUPK21rVW1M8kLk6yrqoe11lpVnZDkxUk+n+Q+rbXzh1ArAAAAALBGLCi4TJLW2ov68PLvk1RVTST5y3SL8/xua+2SwZYIAAAAAKw1Cw4uk6S19uKqujrJSUlakvcmeXBr7coB1gYAAAAArFHzDi6r6qUzbP5ekhsk+UGSv6+qqftaa+2EpZUHAAAAAKxFNWV9nbkbdgvzLERrra1feEnLp6r2S3Jxko2ttR0rXQ8AAAAAjJNh5msLWVV83SAvDAAAAAAwG2EkAAAAADBy5h1cVtWdq+qAebY9sqoeuviyAAAAAIC1bCE9Lj+R5D6TD6rqgKq6tKruPkPbuyV5w1KLAwAAAADWpoUElzXD472TjPQCPAAAAADA+DHHJQAAAAAwcgSXAAAAAMDIEVwCAAAAACNnwwLbb66qO/b/3tjf37yqLprW7sglVQUAAAAArGnVWptfw6pdSaY3rhm2/Wx7a22kF+6pqv2SXJxkY2ttx0rXAwAAAADjZJj52kJ6XD5ikBcGAAAAAJjNvIPL1tobh1kIAAAAAMAki/MAAAAAACNHcAkAAAAAjBzBJQAAAAAwcgSXAAAAAMDIEVwCAAAAACNHcAkAAAAAjBzBJQAAAAAwcgSXAAAAAMDIEVwCAAAAACNHcAkAAAAAjJyxDy6raq+q+kJVtaq6w0rXAwAAAAAs3dgHl0n+LskPVroIAAAAAGBwxjq4rKrfTnLvJH++0rUAAAAAAIOzYaULWKyqumGSf0py/ySXzvOYvZLsNWXTvoOvDAAAAABYqrHscVlVleSUJP/YWvvMAg59epKLp9y2D746AAAAAGCpRiq4rKqt/SI7c91umeRJ6XpLvmCBl3hBko1TbocP9isAAAAAAAahWmsrXcPPVNXBSQ7cTbPvJHl7kvsmmVr8+iQ7k/xLa+1h87zeful6Xm5sre1YeMUAAAAAsHYNM18bqeByvqrqRkn2m7LpsCQfTHJckk+21uY1BFxwCQAAAACLN8x8bSwX52mtnTX1cVX9pP/nmfMNLQEAAACA0TVSc1wCAAAAACRj2uNy
utbaRJJa6ToAAAAAgMHQ4xIAAAAAGDmCSwAAAABg5AguAQAAAICRI7gEAAAAAEaO4BIAAAAAGDmCSwAAAABg5AguAQAAAICRI7gEAAAAAEaO4BIAAAAAGDkbVroAAGAZVN0hya+k+93/rSSnpbWdK1oTAADAHASXALCaVR2T5PlJ7pKkJdmVZH2S76Zqa5J/SmttBSsEAACYkeASAFarqgcmeVuST125bsNxd37iP1+0Y6/r3uDPz3jT9f/kf9/56+vSXp3kpkmetrKFAgAAXFut5U4WVbVfkouTbGyt7VjpegBgYKoOS3Jmkvfe+snveMele+7zkiSHT2mx/XWnPvtD9zzz049M8rtp7X0rUicAADDWhpmvWZwHAFanRyfZea//75Xvu3TPfd6eZNO0/Zv+v+Oe+Yjzr7PxG0n+dPnLAwAAmJvgEgBWp+N3pd727YNu9IL+cU3bX0nyD0c95OAk90rVIctaHQAAwG4ILgFgdTrkc5tueWW64eHTQ8tJ9fWDjzyg//fBy1MWAADA/AguAWB1+klLHba7RvtffsnkP3863HIAAAAWRnAJAKvTB277w2/dZd2unXM2euBXPpJL99jr7CTfXZ6yAAAA5kdwCQCr0yv33nnVDR716XdflKTN1OAOP/hG++1v/k/2vPqql6S1GdsAAACsFMElAKxGrX02yUlPP/0NG598xptr42WX/CyY3PPqq/LAr3y4/fPb/rou2vt6X9/Qdr1qBSsFAACYUa3lDhZVtV+Si5NsbK3tWOl6AGCgqtYlefauqi1Xrtuw/lNH3KauWr8htzvnWzn40oty1sYbfvJGF//o3vE7EAAAWKRh5muCS8ElAKtd1Q121rpHnnvd6//2lRv2uM7lG/b8zmE7zn/Ovlf89KsrXRoAADDeBJdDIrgEAAAAgMUbZr5mjksAAAAAYOQILgEAAACAkSO4BAAAAABGjuASAAAAABg5gksAAAAAYOQILgEAAACAkSO4BAAAAABGjuASAAAAABg5gksAAAAAYOQILgEAAACAkSO4BAAAAABGjuASAIat6qBU/UWqPpuqs1L1lVS9IFWbV7o0AACAUSW4BIBhqnpAkrOSPGdX6ptfvcFNPvq1g4889+pa96SWnJmqp6x0iQAAAKNow0oXAACrVtVvJnlHknf97kNfvO1Lh/7Cc5McniT7XHl5/uJjb7zkEZ993z+k6tK09o8rWisAAMCIqdbaStewYqpqvyQXJ9nYWtux0vXA0FTtk+TBSf44yWFJLkvyX0lenda+s5KlsUpU3TDJ7yY5IN376ra0dvbKFrXCqirJF5JceOsnv+Pll+65z9sn90xp1ba+/6X1oK985Kd77Lr6Bmnt0mWvEwAAYAmGma8ZKg6rXdWdkpyZ5JSWtG8deMRXv37QjS+5et36x7fk26l6Th+wwMJVHZiqNyc5O8mrkjwtycuSTKTqnanatKL1ray7JrndFev3eOGle+7zkn7b9NdaveKuv9fW79p53avWrf/DZa4PAABgpBkqDqtZ1a3S9az8vyfd96nPe9+t774l/TDVva+6PE/8xNt3PPETb//rJFcnec4KVso4qjo4yRlJDkzytO9tPORNd/+T197miIvOOfIFH3j5bX/te1/8vUr+N1W/ltbOWuFqV8Ldkvzktie+7fL0r7uZnL3/ofWVQ26ajZf/5IE3Tl63fOUBAACMNsElrG7PTXLePR71jy//zoGHv2nqjsv32Dt//xsP3bclecIn3vE366pek9Z+uDJlMqZemeT6Se62+Wn/ftskn09y+Nn7H5qH/MHzctiOc8/5z9c+fv11r7r8jUmOWdFKV8YeSa64csOeh+yu4RXr98wV6/fcdxlqAgAAGBuGisNq1Q3Rvd/Vte6k7xx4+NbJrdNb/dOdH9iu2LDHup217jHLXCHjrOrwJA9M8qw+tDw1yTWGhf9gvxsc8tT/d8LBSY5O1W1XoMqVNpHkwD/4wgfm/F2791WX5xbnfy9X
bNjTfLMAAABTCC5h9bprkvUPffBztqcbpjrjPJY79r5efezIO9aF++x732WtbtxUrevnc9zfnKBJkuOSXPl/B29+S5KT+23XCsY/dPO75sK99921s2otzt/47iQXPu9Dr7hTku1JZlwN7wFfPa1d74pLc/iOH5muAQAAYArBJaxeeyXJmQcesf/uGl66x97ZuW79dYde0TiqOiJVL0jywyTnJ7kwybdS9dRU7b+ita2sg5P86Lcf+fJfyhzB+NXrN9TZ+99w3Vn7H3KbZa1uFLR2WZKXrm/tT5//gZe/dXLr1Ca3/8E32jNOe3197/qHfPz6l+7Q4xIAAGAKc1zC6nVWkjz6U+/a77n3fPTsrVrLbX50Zq5cv+Hs5SpsbFT9epL3JamdVW/cdsujzr9kz+scdPfvfu5Wm3ac+9xKHpuqe6e1tRg4XZLkgOtdcenhP9nrOrO3ai0HXHpxzr3eAeuOXLbSRsrfJrnF8V/8wJ8fNfG5Tz/3Ho+6ybcPPOKgg396YR705Y/kd//v9Lpkr+t+/cgLz/ntlS4UAABg1AguYfX6eJJvP/Iz7/21597z0dvTzT94rV5xdzvrS+3mPz67Ll+/x98te4WjrOrIJP+e5Av3fuTLX/vNgze/IFNWht58wffP+fc3nnid61152QdSdYe0dumK1boytiV5wdYPvPSmT7zfllkb3eXsL+fwHeflwzf71f+64/LVNjpa25mqP0rykSMuPvdJr/635x80uevyDXv+aMOuXScddOnFJ/e9MwEAAJjCUHFYrVrbleTv16X9/ivf/YL3TW6d2mTTxee2v/uPk+vCvff9zt47r/rI8hc50k5McuU9HvWP//TNgzf/c6YtPDNxwKZD7vfQFx/SkpslWXvzN7b25SRnHPv1/37wfpf/5PuZYf7Gva66In/x0TfmO9c/7Or33+JuL1v+IkdEa7vS2j8luX2SWyf5jSR32PvqKzet37Vzq9ASAABgZtXajGsFrAlVtV+Si5NsbK3tWOl6YOC6RWReluQJ2/c7+DPPuedjNn/lkJsetN/lP839vvbRHP+F9+/asGvnede96vI7p7WzVrrckVG1d5If7ax65U3/4n0PySy9VZO0N73tr6749YkvfLlau/PyFjkCqm6d5OMX7X29Cx93/2ds/sSNbts951rLL/3gG3nGaa/LbX50Zk7+teOfvuX0N2zd7fkAAAAYO8PM1wwVh9WstZaqJyX59OE7znvya/7teT8bpnp1rftppb1ufWvPS2vnrmCVo2hTkv3eevv7nJ8pw8NnUKff5E573+17X7zt+mUqbKS09rVU/cb+l//krW/912fU2RtvcPVZ+x+y4YaXXJCbXbA9Z2+8wdUvvdsf/rXQEgAAgMUQXMJq13WrfmOq/jnJbZIcluSyDW3XZ9PaT1e2uJHVkuTCffY7cPctW1qtm3FF7TWhtS+n6rZJ7r7p4vMevH7XrlteuM9+u95229/80BcP+4UXP/8DL796pUsEAABgPAkuWT2q9kly/yQ3T3J1kk8n+XA/1yNdgPnl/sbcvp/kwnt/639v8g+/8cdzNrz7dz+Xn+y5z7f3X5ayRlT33Dp9XXL6YemS8V9M8vsrWxUAAABjTnDJ+Ktal+QZSZ6c5IAkP0yyZ//vb6XqaWnt31awQsZNa1ek6vW/cP73HnH9Sy/+wYXX2XhoZpjj8pbnfrf9xsTn66p161+0AlUCAADAqmZVccZbt/jMa5M8J8mbLt7rerfY/LR//8ObPPU9T/ynX7n/n+5KvpnkXal6xMoWyhg6uZL2X6993E8O/OlFybRVs292/lntde98Tl2813XP2mPXzretSIUAAACwillV3Kri463qD5O8Jckfb37av1+a5ORMWUyldu3a/v5TnvS1W573vXskuVla+94KVco4qrpTkv/YWes2vvdWv3HVf2/+petu2HV17nnmp3PPb38yP93zOmfvd8VP75bWtq90qQAAALAShpmvCS4Fl+Ot
6uNJLtv8tH9/ZZJTJ7dOadH2ufLyfOnk379sj107T05rz1j+IhlrVQcneXRLHlPJjZPkJ3vuc+beV1/59xt27XyTBY4AAABYy4aZrxkqzviq2pTkblet2/DadD0tk2vPQ1iX7bl33vWL99jVrBXCYrR2Xlp7frW2OcleSfa43hWX3mzDzqv/UWgJAAAAw2NxHsbZQUnymjs/4PqZMjx8BnXmgYdfb2etK094lqS1K1e6BAAAAFgr9LhknF2cJHtffcVNd9fwBj+5IFdu2OPy4ZcEAAAAwCAILhln30vy5WO//t+/OlejPXZelft97aP5wb4Hf3SZ6gIAAABgiQSXjK9uZalX3vAnF9ztmDM/fV6SGVeaevwn3tEOvvSi3PAnFzxzXuet2itVx6fq5FS9IlVP6RdoAQAAAGCZWFXcquLjrWpDkvfurHX3fMHRj9jj7bf7zezY+3qVJIdf9MP8ySffmYd84f35xI1u+9a7fu9Lx8/jfI9K8vwkB/90j73PunL9Hm3j5T85rNJaJa9N8mTzHAIAAAB0hpmvCS4Fl+Ovau8kL92VeuQVG/ZY942DN9ceO6/Orc79bn665z7tU0f84uvveeanH53dPdmrnpLkH7514BGnPeF+T7vVNw/efEiS7H/Zjjz0c/9+8Ykff+t117X2wST3T2tXD/8LAwAAABhtgsshEVyuMlWH7qx1D9u+8QZHXbLndfY673rX/+x+l//0b3/5+//3k3kce/Mk3/j8obd4zwMe+g/3m9w6pUX7je98Nm98xzNbJY9Pa68expcAAAAAME4El0MiuORnql7ckofd8invvPSKPfbalGuGlpPaG97xzMuP/s5nv13J7XfbgxMAAABglRtmvmZxHujc/8wDDv/oFXvsdXhmDi2TpP7lDv9vn0pum+TIZawNAAAAYM0RXEJn4/aNN7hid41+uO+Bk//cf6jVAAAAAKxxgkvoXHCji354nd012nTxuT9rP9xyAAAAANY2wSV0Tj3ywh/c/TpXXvb9JLPNXdn++PPbLm/JZ9LaxDLWBgAAALDmCC6h8+pKrnfqm//iy+nW3JkeXrbf/vp/59e/98W9K3nFCtQHAAAAsKYILiFJ34Pycbc+77v3OePVj/rUnc7+6nl9gJnDdpybv/7way55xXu2tiT/muSfV7BSAAAAgDWhWpttVOzqN8zl2hlTVQ9O8sIkm69cv+GCq9Zt2HWdqy4/IMlPK3lZkr9JaztXtkgAAACA0TDMfG2sg8uqOjbJ3yS5XZLLk3y0tXb/BRwvuOTaqtYn+a0kv5pkQ5JvJ3lHWvvJitYFAAAAMGKGma9tGOTJllNVPSjJPyV5RpKPpPtabrOiRbE6dD0q/6O/AQAAALACxjK4rKoNSU5O8tTW2uum7PraCpUEAAAAAAzQuC7Oc8ckm5LsqqrPV9U5VfX+qpqzx2VV7VVV+03ekuy7LNUCAAAAAAsyrsHlTfr7ZyV5bpLfSXJhktOr6oA5jnt6ujH3k7ftQ6wRAAAAAFikkQouq2prVbXd3G6Zn9f9vNbaO1trn03yiCQtye/NcYkXJNk45Xb4ML8eAAAAAGBxRm2Oy39Icspu2nwnyaH9v382p2Vr7Yqq+k6SG812YGvtiiRXTD6uqkUXCgAAAAAMz0gFl62185Kct7t2VfXZdAHkLZL8d79tjySbk3xviCUCAAAAAMtgpILL+Wqt7aiqf0zy7Ko6O11Y+dR+9ztWrjIAAAAAYBDGMrjsPTXJ1UnelGSfJJ9Mco/W2oUrWhUAAAAAsGTVWlvpGlZMVe2XbnXxja21HStdDwAAAACMk2HmayO1qjgAAAAAQCK4BAAAAABGkOASAAAAABg5gksAAAAAYOQILgEAAACAkSO4BAAAAABGjuASAAAAABg5gksAAAAAYOQILgEAAACAkSO4BAAAAABGjuASAAAAABg5gksAAAAAYOQILgEAAACAkSO4BAAAAABGjuASAAAAABg5gksAAAAAYOQI
LgEAAACAkSO4BAAAAABGjuASAAAAABg5gksAAAAAYOQILgEAAACAkSO4BAAAAABGjuASAAAAABg5gksAAAAAYOQILgEAAACAkSO4BAAAAABGjuASAAAAABg5gksAAAAAYOQILgEAAACAkSO4BAAAAABGjuASAAAAABg5gksAAAAAYOQILgEAAACAkSO4BAAAAABGjuASAAAAABg5gksAAAAAYOQILgEAAACAkSO4BAAAAABGjuASAAAAABg5gksAAAAAYOQILgEAAACAkSO4BAAAAABGjuASAAAAABg5gksAAAAAYOQILgEAAACAkSO4BAAAAABGjuASAAAAABg5gksAAAAAYOQILgEAAACAkSO4BAAAAABGjuASAAAAABg5gksAAAAAYOQILgEAAACAkSO4BAAAAABGjuASAAAAABg5gksAAAAAYOQILgEAAACAkSO4BAAAAABGjuASAAAAABg5gksAAAAAYOQILgEAAACAkSO4BAAAAABGjuASAAAAABg5gksAAAAAYOQILgEAAACAkSO4BAAAAABGjuASAAAAABg5G1a6AFaZqr2S3D/J7ZNUkq8meWdau2wlywIAAABgvOhxyWBUVaqekOSsJP+a5I+S/GGSNyXZnqqnpqpWskQAAAAAxocelwzKs5L8TZLXXbLndV5y2ye//eAkhx7/hfeve+6HXnnXda39XZLDkjx5JYsEAAAAYDxUa22la1gxVbVfkouTbGyt7VjpesZW1V2SfCLJMzY/7d+/keTkJIdPabH9Ne/82/ff+9uffHSSe6e1/1yJMgEAAAAYrGHma2M7VLyqfqGq3lNV51fVjqr676o6ZqXrWqOekOQ7N//zf/tmklOTbJq2f9NjHvhXj7por+t9r28LAAAAAHMa2+Ayyb+nG+p+jyS/nOSLSf69qg5Z0arWmm7eygftrHVvuGr9HidNbp3eKlV55V1/b7+W3DdVey9vkQAAAACMm7EMLqvqoCQ3T7K1tfal1tq3kmxJcp0kt1nR4taevZLs85Gb3mmfdMPDZ1uAp757wKbrV/ec27hs1QEAAAAwlsZ1cZ4fJ/lGkodW1eeSXJHksUnOTfLZ2Q6qqr3SBW2T9h1mkSuq6vAkd0qyR5JvJ/lChjOh6RVJrthj59U32V3DG17y47SkVWI+UQAAAADmNJY9LlsXwN0ryS8luSTJ5UmekuQ+rbUL5zj06ekmC528bR9yqcuv6vapeneS7yX5tyRvT/K5JJ9N1YMHfr3uZ/HuO2//6q9lrly0tfz+lz6U8667/yfT2mUDrwMAAACAVWWkgsuq2lpVbTe3W1Y3r+Ir0vWwPCrJnZO8O8n7qurQOS7xgnTDlCdvh8/RdvxU3T3Jx5Pc8up165/wiOOe+aB7P/IVj377be+1ZVfyoyRvS9VfDuHKL7/OVVcc8SefPPWiJDOmlw/+8n+22/7ozGy8/CfPH8L1AQAAAFhlajijhxenqg5OcuBumn0nXVj5oSTXn7rMelV9K8nrWmtb53m9oS3Xvuyqrp/ue/PZuz7uDa89Z7+DX5SpwWxr29/21qd//FfP/srvJ/nttPaBAV//RUn+/A2/fN+8/k73a2fvf0glySE7zs/DP/e+POaT78o3D77Rh2553vfuM6Qh6wAAAAAss2HmayMVXM5XVd03XQ/Lja21n0zZ/o0kb2ytzatX3yoLLp+c5IUP/sOtf/KpG93mtZNbp7RoaS2fe9kfnXnAZTvOTGv3GfD1K8lTr1q3/pnrd+26zpkHHp5dVbnpj7fnig17tq/c8Kbv+NXtX/3DtLZroNcFAAAAYMUMM18bqaHiC/CJJBcmeWNV3b6qfqG6Hn9HJtm2sqWtmIfuSv3bp250m2f3j6ev7l2pyt8f9ccHJPmtzD2kfuFaa2nt7/bYtfMGO9ete1SSd162x14f+MSNb/fis/Y/5Pq/evZXfl9oCQAAAMB8jeWq4q2186vqPkmel+Qj6VbO/mqS+7XWvriixa2cTV88
9OafydzzdtaXDr35Af2/D0tyzsCraO2neySvu3nyuoGfGwAAAIA1YyyDyyRprX0myW+tdB0j5PKd69bfcHeN9r3i0sl/WtkbAAAAgJE1rkPFubaP3OZHZ/7Kul0752x03//7aK5Yv8f5Sb65PGUBAAAAwMIJLlePV+599ZWHPOTz/3FhkhlXXLrpj89uD/jqaW3Drp0vT2tXL3N9AAAAADBvgsvVorVPJXnts/7r1Rsf+el3Z6+rrmhT9uWo736u/cu//mVdtX7D9vVt10krVicAAAAAzEO1NmPnvDVhmMu1r4iqDUlOasnjd+x13fbRm/zyuivX75Ff+sE3ctMLtuf862z8xkGXXnx0WvvhSpcKAAAAwPgbZr4muFxNweWkqpvurPqTHXtd7zevXrf+Olds2HPioEsvev7eV1/50azlHzgAAAAAAyW4HJJVG1wCAAAAwDIYZr5mjksAAAAAYOQILgEAAACAkSO4BAAAAABGjuASAAAAABg5gksAAAAAYOQILgEAAACAkSO4BAAAAABGjuASAAAAABg5gksAAAAAYOQILgEAAACAkSO4BAAAAABGjuASAAAAABg5gksAAAAAYOQILgEAAACAkSO4BAAAAABGjuASAAAAABg5gksAAAAAYOQILgEAAACAkSO4BAAAAABGjuASAAAAABg5G1a6gBGxb1WtdA0AAAAAMG72HdaJ13pwOfmN3b6iVQAAAADAeNs3yY5BnrBaa4M831iprpvlYUkuWelaVpF90wXBh8f3FUaJ1yaMJq9NGE1emzB6vC5hNE2+Nm+Z5JttwEHjmu5x2X8zv7/SdawmU4bcX9JaG2jKDiye1yaMJq9NGE1emzB6vC5hNE15bZ4z6NAysTgPAAAAADCCBJcAAAAAwMgRXDJoVyR5dn8PjA6vTRhNXpswmrw2YfR4XcJoGuprc00vzgMAAAAAjCY9LgEAAACAkSO4BAAAAABGjuASAAAAABg5gksAAAAAYOQILlmSqjq0qrZW1WlVdUlVtao6egHHP6s/Zvrt8uFVDavfUl+b/Tk2VdXbq+qiqtpRVe+pqpsMp2JYG6pq/6p6TVWdV1U/7V+jd5znsafM8jvz68OuG1aLqtqrql5YVT+oqsuq6pNV9ZvzPNbvRRiCxb4ufZaE4aqq61XVs6vqA1V1Qf/6evgCjl/0/3un2rDQA2CaWyR5WpJvJflykrsu8jyPS/KTKY93LrEuWOuW9NqsquslOS3JxiTPT3JVkicn+WhV3aG19uPBlgurX1WtS7Itye2TvCjJ+Uken+T0qvrl1tq35nGaK5I8atq2iwdaKKxupyQ5LslJ6X5HPjzJf1TVMa21/57tIL8XYahOySJel1P4LAnDcVCSv0lyVpIvJjl6vgcO6P+9SQSXLN1nkxzYWrugqo5L8o5FnufU1tr5A6wL1rqlvjYfn+TmSe7cWvt0klTV+5N8JcmfJXnGIIuFNeK4JHdL8nuttVOTpKrenuSbSZ6d5Ph5nOPq1tqbh1cirF5Vdeckf5Dkqa21v++3/XO6321/l+71ORu/F2EIlvi6nOSzJAzHOUkOba39sKrulOTTCzh2EP/vTWKoOEvUWruktXbBAE5VVbVfVdUAzgVr3gBem8cl+fTkh7P+nF9P8uEkD15qfbBGHZfkR0neNbmhtXZekrcnuV9V7TWfk1TV+qrabzglwqp2XLqeWK+Z3NBauzzJ65LctaqO2M2xfi/C4C3ldTnJZ0kYgtbaFa21Hy7y8IH8vzcRXDI6vpNuqNslVfXmqrrhShcEa1Xfrf92ST4zw+5PJblpVe27vFXBqvBLST7XWts1bfunklwnyS/M4xzXSbIjycX9XEOv6IewArv3S0m+2VrbMW37p/r7O8x0kN+LMFSLel1O47MkjJ5B/L83iaHirLwLk7w8ySfSzdt1VJInJLlzVd1phl9gwPAdkGSvdEMDppvcdliSbyxbRbA6HJrkYzNsn/q6+vIcx5+Tbtjc59L9
8fk+6Yav3r6qjm6tXT3AWmE1OjS7/902E78XYXgW+7pMfJaEUbbU//f+jOCSn+n/mrznPJtf0VprS71ma+3kaZveWVWfSvIv6T6MbV3qNWDcrcBrc5/Jc82w7/JpbWBNWuTrcp8s4XXVWnv6tE3/WlXfTPK8dMNx/nWe9cBatdjXoN+LMDyL/t3osySMtCX9v3cqQ8WZ6jeSXDbP2y2GVURr7S1JfpjkXsO6BoyZ5X5tXtbfzzTvyN7T2sBatZjX5WUZ/OvqJUl2xe9MmI/Fvgb9XoThGejvRp8lYWQM7LWtxyVTfT3JI+bZdqbu/IN0drphOcDyvzYvSPfXsUNn2De57QcDuA6Ms8W8Ls/JgF9XrbXLqurH8TsT5uOcJJtm2L6716DfizA8i31dzsVnSVh5A/t/r+CSn+lXizplpevoV4PbnOTzK1wKjITlfm221nZV1ZeT3GmG3b+a5DuttUuWqx4YRYt8XX4hyVFVtW7aROW/muTSJN9caB39giAHJTlvocfCGvT/t3fvwXZW5R3Hv78ElEGRoCDFqoRGBlrRWqtILVUSVASntIjUjiBSQRTH4VYtpYJQL9xRK4w1lZuA5dKAKLSQ6jRBwKkFKiBQucQEQYrcU2kIcnn6x3oP7O6ck3Nycs7Jjnw/M2f2ed937bXW+569Z7/7OWut5wZgdpKX9K1995ae4yvwc1GaVDcwjvflSPwuKQ2MG5ig+16nimvKJHl1kq379m0yTNEDgE2AK6akY9Lz3HDvTWAe8OYkb+optxUwB/inqeyf9GtkHrAp8N6hHUk2BvYALq2qJ3r2z0oyq2d7vRGyFh8JBD8zpbGYB0wH9h/akeSFtNHTP6yqu7t9fi5KU2fc70u/S0qDIclmSbZOsm7P7jHf945a/wTkV9HzXJIjul9fC/w5cAawGKCqPt9TbiHw9qpKz75lwAW0bFLLge27Om4E/rCqlk3BKUi/llbzvbkB7T/VGwAnAU8Ch9JuLN9QVY7uklZRkunA1cA2wInAg7TkAa8G3lxVt/WUXQJQVTO77Zm09+R5tGnqADsBu9C+nL2n77/ZkoaR5EJgN9r6sHcCHwK2BXasqu93ZRbi56I0ZVbjfel3SWmSJfkEMIOWBfwA4GKeG9F8SlUtTXIW7X27RVUt6Z435vveUftg4FKrK8mIL6K+D5aFrPhh83XgrcCraIu03gVcBHzBKTfS6lmd92a3/5W0G8h30UboLwQOqao7J6O/0vNBko1oN29/SsumeC3wyaq6rq/cEvh/gcsZwCnAdrQbx+m0L3ffBE6qqienov/S2i7JesDngL2AjYCbgCOran5PmYX4uShNmfG+L/0uKU2+7p508xEOb1FVS4YLXHbPHdN976h9MHApSZIkSZIkadC4xqUkSZIkSZKkgWPgUpIkSZIkSdLAMXApSZIkSZIkaeAYuJQkSZIkSZI0cAxcSpIkSZIkSRo4Bi4lSZIkSZIkDRwDl5IkSZIkSZIGjoFLSZIkSZIkSQPHwKUkSZIkSZKkgWPgUpIkaYokOTpJrel+TLYk6yQ5IcndSZ5Jckm3v5IcvWZ7J0mSpLWFgUtJkqRxSLJPF4gb+lme5N4k85McmGSDNd3HIUnW74KmO4yx/A7dOb1vnE1+GPgUMA/4EPClcdaz1kuyfZLLk/y8e438LMmlST6wpvsmSZI06NZZ0x2QJElay30GWAysC/wGsAPwZeDQJLtW1U09ZT8PHDfVHQTWB47qfl84Be3NAX5eVYdMQVsDK8kewAXADcDfAY8AWwBvAz4C/OMa65wkSdJawMClJEnS6rm8qq7r2T42yRzgMuA7SX67qh4HqKqngKdWVlmSacALqmr5pPV48r0ceHRNd2IAHA3cCmxXVb/qPZDk5VPViSQB1ht6HUqSJK0tnCouSZI0warq34DPAZsDew3tH26Ny25K9qlJ9kxyC/AE8O7u2G8mOSPJL5I8keSWJB/uby/Jel3dt3fTkf87ycVJZiWZ
CTzQFT2qZ2r70atyTkN9T/KaJGcleTTJ0iRnJlm/KzOzO7/ZwGt72tphhDrPSrJkpLaG2b9XkuuTPJ7k4STnJ3lVX5mFSW5O8jtJFiRZ1k3T/qtVuW49ZaYlObi79su7v8XcJBuN4bLNAq7tD1oCVNX9fX2ZluSgJD/u2nkgyRVJ3tRTZp0kRyZZ1L0eliQ5JskL++pakuSyJDsluQ54HPhod2xGki93648+keTOJId1AXNJkqSB4g2KJEnS5Dine3zXGMrOoa0DeQFwELAkyabAvwPvAE7t9t8JnJ7k4KEnJplOG915FHA98Je0ackbAtvQgpYHdMW/BXyw+7l4nOd1IbABcHj3+z48Nw39ga7unwD39LT1X+Ns61lJPg2cDdwBHEqbjr8j8P0kM/qKbwRcAdxIux4/AY5PsnNPfaNdtyFzgROBa2h/gzOBPYH5SdYdpdt3ATsmeeUYTvH07pzuBg6jLSmwHNiup8xpwGeB/wQOAa6k/R3OH6a+rYDzgO92/b6hCzBfSQumnw0c2J3XscAXx9BHSZKkKeVUcUmSpElQVfckWUobdTearYDXVdWtQzuSnAZM7/Y/1O3+WpLzgKOTzO2m/u5NC+AdWlW9SXCOS5KqqiTzgL8Hbqqqc1fz1H5UVfv29PNlwL7AYVX1v8C5SfYDnp6Atoba2Bz4W+CIqjqmZ//FwI+AjwPH9DzlFcDeVXVOV+50WhBxX+DyrsxKr1v3vO2B/YA9q+rZ9SiTLKAFRvdg5etUHk8LSC5Kcg1wNfCvwA+q6pme+mbTAsBfqaqDep5/ck9ffpeW6Oi0qvpId/yrSe4HPplkdlUt6Hnua4B3V9X8nnaOoL0ef6+q7uh2z01yL/CpJCdX1d0rOR9JkqQp5YhLSZKkyfMYbXTiaK7sC1oG2B24tNvceOgHmE8bFfjGrvjuwIPAKf2VVtUK060nwNf6tq8CXpbkJZPQ1pD30u5bL+y7FvfRRmDO7iv/GPBs0LSbqv0fwG/1lBnLddsDWAp8t6/d67s2+tvtr+cM2rT/hcD2wJG063VHkrf29aVowdmR+rJL99g/MvLk7vE9ffsX9wYte87nKuCRvvP5Hi1I/raVnY8kSdJUc8SlJEnS5HkxcP+opVpW8l6bADOA/buf4Qwld5kF3NYl/pkKP+vbfqR73Aj4n0lqc0sgtCDlcJ7s275nmKDtI8Dre7bHct22pAWJR/objppgpwsezu+maf8+8H7gY8BlSbbu1rqcBdxbVQ+vpKrNgWdoywX01n9fkke74736X1PQzuf1PLfmab8pSxgkSZI0FgYuJUmSJkG3ruGG9AWaRtCf7XloVsy5wDdGeM5N4+za6np6hP0ZR10jjQid3rc9rSu78wjtP9a3PVF9nEYLWu45wvGRAoArqKpltNGOVyV5kLa25s6M/Pcdsaoxlhsug/g02pqXJ4zwnNtXsS+SJEmTysClJEnS5Phg99g/XXcsHgB+CUyvqu+NUnYR8JYk61ZV/8jDIZMxZXwiPEIbWdqvf/TgIlrQcXFVTVRwbSzXbREtOdI13XqiE+W67nGznnZ2SvLSlYy6vIsWeNySnmRHXRKnGd3x0SwCXjyG15QkSdJAcI1LSZKkCZZkDm09w8XAN1f1+VX1NHARsHuSbfqPJ9mkZ/MiYGPgE8OUGxphuKx7nLGqfZlki4ANkzw7hTvJZsBufeUupo2iPKrnnIbKp0sQtKrGct0upI3+PHKYMusMk828v8yOIxwaWq/ytp6+hOeysw/Xl3/pHg/uK3Jo9/jPK+tL50LgD5LsNEw7M5I4qEGSJA0Ub04kSZJWz85JtqbdV20KzAHeSRsBt2tVLR9nvX9NS/7ywyRfB24FXkpLyvOO7neAs2kZsr+YZFvadOQXdWW+Cny7qh5Pcivw/iS3Aw8DN1fVzePs20Q5n5Z5+1tJvgKsDxxAm7I8lHyIqlrUZcQ+FpiZ5BLaiNQtaEHOfwBOWsW2x3LdrkwyFzg8yRto
GcGfpI163AM4CJi3kja+nWQxLcnSop76/xi4tttPVS1Icg5wYJItaRnLpwF/BCwATq2qG5N8A9i/C5heCWxLyzR+SV9G8ZGcCOxKW1/zLFqSoRcBrwPeB8ykJSySJEkaCAYuJUmSVs9nu8df0QKCP6aNijuzqn453kqr6hddQO0ztKzaHwceAm4BDusp93SSXYBPAx+gZah+CLi668uQ/WgZtL8EvICWwXqNBi6r6qEku9EyZZ9AG6F6OC0w+Ma+ssd1QddDeG5k4t20YOJ3xtH2mK5bVX0syfXAR4FjgKeAJbT1R68ZpZn9gD8B/gx4BW1U5U+BLwDH9yUG+gvauqX70gKMS2lTyn/QV99PgX1oAdv7aMHcFbKRj3DOy5K8HfgbWuB1b1pCpdtp13TpWOqRJEmaKlkx4aIkSZIkSZIkrVmucSlJkiRJkiRp4Bi4lCRJkiRJkjRwDFxKkiRJkiRJGjgGLiVJkiRJkiQNHAOXkiRJkiRJkgaOgUtJkiRJkiRJA8fApSRJkiRJkqSBY+BSkiRJkiRJ0sAxcClJkiRJkiRp4Bi4lCRJkiRJkjRwDFxKkiRJkiRJGjgGLiVJkiRJkiQNnP8DFJyf6HyoTacAAAAASUVORK5CYII=", "text/plain": [ "
" ] @@ -914,16 +1491,24 @@ }, { "cell_type": "code", - "execution_count": 27, + "execution_count": 39, "id": "8537c4b1", - "metadata": {}, + "metadata": { + "editable": true, + "slideshow": { + "slide_type": "" + }, + "tags": [ + "hide-input" + ] + }, "outputs": [ { "name": "stdout", "output_type": "stream", "text": [ - "Pearson Correlation EK-FAC vs direct 0.9608164875442669\n", - "Spearman Correlation EK-FAC vs direct 0.8946217598307178\n" + "Pearson Correlation EK-FAC vs direct 0.9573912191268695\n", + "Spearman Correlation EK-FAC vs direct 0.8975136660201023\n" ] } ], @@ -948,9 +1533,13 @@ }, { "cell_type": "code", - "execution_count": 28, + "execution_count": 40, "id": "a3256f00", "metadata": { + "editable": true, + "slideshow": { + "slide_type": "" + }, "tags": [ "hide-input" ] @@ -960,8 +1549,8 @@ "name": "stdout", "output_type": "stream", "text": [ - "Pearson Correlation EK-FAC vs direct - top-20 influences 0.9901775015427601\n", - "Spearman Correlation EK-FAC vs direct - top-20 influences 0.9428571428571428\n" + "Pearson Correlation EK-FAC vs direct - top-20 influences 0.9872023898446971\n", + "Spearman Correlation EK-FAC vs direct - top-20 influences 0.9759398496240601\n" ] } ], @@ -1003,6 +1592,7 @@ "cell_type": "markdown", "id": "9245791c", "metadata": { + "editable": true, "slideshow": { "slide_type": "" }, @@ -1029,7 +1619,7 @@ "name": "python", "nbconvert_exporter": "python", "pygments_lexer": "ipython3", - "version": "3.9.16" + "version": "3.8.18" }, "vscode": { "interpreter": { diff --git a/requirements-dev.txt b/requirements-dev.txt index 9ab4b25c5..04d60a854 100644 --- a/requirements-dev.txt +++ b/requirements-dev.txt @@ -3,8 +3,8 @@ tox-wheel pre-commit==3.1.1 black[jupyter] == 23.1.0 isort == 5.12.0 -pylint==2.12.0 -pylint-json2html +pylint==3.1.0 +pylint-json2html==0.5.0 anybadge mypy==1.5.1 types-tqdm diff --git a/requirements-docs.txt b/requirements-docs.txt index a4e0b016e..f8f106f83 100644 --- a/requirements-docs.txt +++ 
b/requirements-docs.txt @@ -1,20 +1,23 @@ -mike +griffe==0.42.1 +mike==2.0.0 markdown-captions -mkdocs==1.5.2 -mkdocstrings[python]>=0.18 +mkdocs==1.5.3 +mkdocstrings[python]>=0.24 +mkdocstrings-python==1.9.0 mkdocs-alias-plugin>=0.6.0 -mkdocs-autorefs -mkdocs-bibtex +mkdocs-autorefs==1.0.1 +mkdocs-bibtex==2.14.1 mkdocs-gen-files mkdocs-git-revision-date-localized-plugin -mkdocs-glightbox -mknotebooks>=0.8.0 -pygments +mkdocs-glightbox==0.3.7 mkdocs-literate-nav -mkdocs-material -mkdocs-section-index +mkdocs-material[imaging]==9.5.16 +mkdocs-section-index==0.3.8 mkdocs-macros-plugin -neoteroi-mkdocs # Needed for card grid on home page +mknotebooks==0.8.0 +pygments pypandoc; sys_platform == 'darwin' pypandoc_binary; sys_platform != 'darwin' GitPython +# Use for the binder link hook +beautifulsoup4 diff --git a/requirements-notebooks.txt b/requirements-notebooks.txt index 461b14156..32b7879d9 100644 --- a/requirements-notebooks.txt +++ b/requirements-notebooks.txt @@ -1,5 +1,7 @@ -torch==2.0.1 -torchvision==0.15.2 datasets==2.14.6 +distributed==2024.4.0 pillow==10.0.1 +torch==2.0.1 +torchvision==0.15.2 transformers==4.36.0 +zarr==2.17.1 diff --git a/requirements.txt b/requirements.txt index aedacc12f..d819d727e 100644 --- a/requirements.txt +++ b/requirements.txt @@ -4,7 +4,7 @@ pandas>=1.3 scikit-learn scipy>=1.7.0 cvxpy>=1.3.0 -joblib +joblib>=1.3.0 cloudpickle tqdm matplotlib diff --git a/setup.py b/setup.py index 37631787e..28b2fa400 100644 --- a/setup.py +++ b/setup.py @@ -12,7 +12,7 @@ package_data={"pydvl": ["py.typed"]}, packages=find_packages(where="src"), include_package_data=True, - version="0.8.1.dev0", + version="0.8.2.dev0", description="The Python Data Valuation Library", install_requires=[ line @@ -43,6 +43,7 @@ "Programming Language :: Python :: 3.8", "Programming Language :: Python :: 3.9", "Programming Language :: Python :: 3.10", + "Programming Language :: Python :: 3.11", "Typing :: Typed", "Operating System :: MacOS :: MacOS X", "Operating 
System :: Microsoft :: Windows", @@ -51,8 +52,8 @@ ], project_urls={ "Source": "https://github.com/aai-institute/pydvl", - "Documentation": "https://aai-institute.github.io/pyDVL", - "TransferLab": "https://transferlab.appliedai.de", + "Documentation": "https://pydvl.org", + "TransferLab": "https://transferlab.ai", }, zip_safe=False, # Needed for mypy to find py.typed ) diff --git a/src/pydvl/__init__.py b/src/pydvl/__init__.py index 370991569..399443522 100644 --- a/src/pydvl/__init__.py +++ b/src/pydvl/__init__.py @@ -7,4 +7,4 @@ The two main modules you will want to look at are [value][pydvl.value] and [influence][pydvl.influence]. """ -__version__ = "0.8.1.dev0" +__version__ = "0.8.2.dev0" diff --git a/src/pydvl/influence/__init__.py b/src/pydvl/influence/__init__.py index 32d41b81b..6065b7cf9 100644 --- a/src/pydvl/influence/__init__.py +++ b/src/pydvl/influence/__init__.py @@ -1,9 +1,13 @@ """ This package contains algorithms for the computation of the influence function. -> **Warning:** Much of the code in this package is experimental or untested and is -subject to modification. In particular, the package structure and basic API will -probably change. +See [The Influence function][the-influence-function] for an introduction to the +concepts and methods implemented here. + +!!! Warning + Much of the code in this package is experimental or untested and is subject + to modification. In particular, the package structure and basic API will + probably change. 
""" from .base_influence_function_model import InfluenceMode diff --git a/src/pydvl/influence/array.py b/src/pydvl/influence/array.py index 330b7fe73..a82b380b8 100644 --- a/src/pydvl/influence/array.py +++ b/src/pydvl/influence/array.py @@ -8,10 +8,11 @@ """ from abc import ABC, abstractmethod -from typing import Callable, Generator, Generic, List, Optional, Tuple +from typing import Callable, Generator, Generic, List, Optional, Tuple, Union import zarr from numpy.typing import NDArray +from zarr.storage import StoreLike from .base_influence_function_model import TensorType @@ -140,7 +141,7 @@ def compute(self, aggregator: Optional[SequenceAggregator] = None): def to_zarr( self, - path_or_url: str, + path_or_url: Union[str, StoreLike], converter: NumpyConverter, return_stored: bool = False, overwrite: bool = False, @@ -153,6 +154,7 @@ def to_zarr( Args: path_or_url: The file path or URL where the Zarr array will be stored. + Also excepts instances of zarr stores. converter: A converter for transforming blocks into NumPy arrays compatible with Zarr. return_stored: If True, the method returns the stored Zarr array; otherwise, @@ -244,7 +246,7 @@ def compute(self, aggregator: Optional[NestedSequenceAggregator] = None): def to_zarr( self, - path_or_url: str, + path_or_url: Union[str, StoreLike], converter: NumpyConverter, return_stored: bool = False, overwrite: bool = False, @@ -257,6 +259,7 @@ def to_zarr( Args: path_or_url: The file path or URL where the Zarr array will be stored. + Also excepts instances of zarr stores. converter: A converter for transforming blocks into NumPy arrays compatible with Zarr. 
return_stored: If True, the method returns the stored Zarr array; diff --git a/src/pydvl/influence/base_influence_function_model.py b/src/pydvl/influence/base_influence_function_model.py index 0a9a9f33b..73fe53d8f 100644 --- a/src/pydvl/influence/base_influence_function_model.py +++ b/src/pydvl/influence/base_influence_function_model.py @@ -1,10 +1,12 @@ +from __future__ import annotations + from abc import ABC, abstractmethod from enum import Enum -from typing import Collection, Generic, Iterable, Optional, TypeVar +from typing import Collection, Generic, Iterable, Optional, Type, TypeVar class InfluenceMode(str, Enum): - r""" + """ Enum representation for the types of influence. Attributes: @@ -28,10 +30,10 @@ def __init__(self, mode: str): class NotFittedException(ValueError): - def __init__(self): + def __init__(self, object_type: Type): super().__init__( - f"Objects of type InfluenceFunctionModel must be fitted before calling " - f"influence methods. " + f"Objects of type {object_type} must be fitted before calling " + f"methods. " f"Call method fit with appropriate input." ) @@ -70,7 +72,7 @@ def is_fitted(self): """Override this, to expose the fitting status of the instance.""" @abstractmethod - def fit(self, data: DataLoaderType): + def fit(self, data: DataLoaderType) -> InfluenceFunctionModel: """ Override this method to fit the influence function model to training data, e.g. 
pre-compute hessian matrix or matrix decompositions @@ -84,7 +86,7 @@ def fit(self, data: DataLoaderType): def influence_factors(self, x: TensorType, y: TensorType) -> TensorType: if not self.is_fitted: - raise NotFittedException() + raise NotFittedException(type(self)) return self._influence_factors(x, y) @abstractmethod @@ -114,7 +116,7 @@ def influences( mode: InfluenceMode = InfluenceMode.Up, ) -> TensorType: if not self.is_fitted: - raise NotFittedException() + raise NotFittedException(type(self)) return self._influences(x_test, y_test, x, y, mode) @abstractmethod @@ -150,7 +152,7 @@ def _influences( if None, use $x=x_{test}$ y: optional label tensor to compute gradients mode: enum value of [InfluenceMode] - [pydvl.influence.base_influence_modl.InfluenceMode] + [pydvl.influence.base_influence_function_model.InfluenceMode] Returns: Tensor representing the element-wise scalar products for the provided batch @@ -180,16 +182,16 @@ def influences_from_factors( of the batch $(x, y)$. Args: - z_test_factors: pre-computed array, approximating + z_test_factors: pre-computed array, approximating $H^{-1}\nabla_{\theta} \ell(y_{\text{test}}, f_{\theta}(x_{\text{test}}))$ - x: model input to use in the gradient computations + x: model input to use in the gradient computations $\nabla_{\theta}\ell(y, f_{\theta}(x))$, resp. 
$\nabla_{x}\nabla_{\theta}\ell(y, f_{\theta}(x))$, if None, use $x=x_{\text{test}}$ - y: label tensor to compute gradients - mode: enum value of [InfluenceMode] - [pydvl.influence.base_influence_modl.InfluenceMode] + y: label tensor to compute gradients + mode: enum value of [InfluenceMode] + [pydvl.influence.base_influence_function_model.InfluenceMode] Returns: Tensor representing the element-wise scalar products for the provided batch diff --git a/src/pydvl/influence/influence_calculator.py b/src/pydvl/influence/influence_calculator.py index dd2b4383f..7e1186f29 100644 --- a/src/pydvl/influence/influence_calculator.py +++ b/src/pydvl/influence/influence_calculator.py @@ -1,7 +1,7 @@ """ This module provides functionality for calculating influences for large amount of data. The computation is based on a chunk computation model in the form of an instance of -[InfluenceFunctionModel][pydvl.influence.base_influence_model.InfluenceFunctionModel], +[InfluenceFunctionModel][pydvl.influence.base_influence_function_model.InfluenceFunctionModel], which is mapped over collection of chunks. """ @@ -101,7 +101,7 @@ class DaskInfluenceCalculator: Dask for distributed computing and parallel processing. It requires an influence computation model of type [InfluenceFunctionModel] - [pydvl.influence.base_influence_model.InfluenceFunctionModel], + [pydvl.influence.base_influence_function_model.InfluenceFunctionModel], which defines how influences are computed on a chunk of data. Essentially, this class functions by mapping the influence function model across the various chunks of a [dask.array.Array][dask.array.Array] @@ -110,7 +110,7 @@ class DaskInfluenceCalculator: Args: influence_function_model: instance of type [InfluenceFunctionModel] - [pydvl.influence.base_influence_model.InfluenceFunctionModel], that + [pydvl.influence.base_influence_function_model.InfluenceFunctionModel], that specifies the computation logic for influence on data chunks. 
It's a pivotal part of the calculator, determining how influence is computed and applied across the data array. @@ -140,7 +140,7 @@ class DaskInfluenceCalculator: Make sure to set `threads_per_worker=1`, when using the distributed scheduler for computing, if your implementation of [InfluenceFunctionModel] - [pydvl.influence.base_influence_model.InfluenceFunctionModel] + [pydvl.influence.base_influence_function_model.InfluenceFunctionModel] is not thread-safe. ```python client = Client(threads_per_worker=1) @@ -155,8 +155,8 @@ class DaskInfluenceCalculator: from pydvl.influence import DaskInfluenceCalculator from pydvl.influence.torch import CgInfluence from pydvl.influence.torch.util import ( - torch_dataset_to_dask_array, - TorchNumpyConverter, + torch_dataset_to_dask_array, + TorchNumpyConverter, ) from distributed import Client @@ -332,8 +332,8 @@ def influences( resp. $\nabla_{x}\nabla_{\theta}\ell(y, f_{\theta}(x))$, if None, use $x=x_{\text{test}}$ y: optional label tensor to compute gradients - mode: enum value of [InfluenceType] - [pydvl.influence.base_influence_model.InfluenceType] + mode: enum value of [InfluenceMode] + [pydvl.influence.base_influence_function_model.InfluenceMode] Returns: [dask.array.Array][dask.array.Array] representing the element-wise scalar @@ -457,8 +457,8 @@ def influences_from_factors( resp. $\nabla_{x}\nabla_{\theta}\ell(y, f_{\theta}(x))$, if None, use $x=x_{\text{test}}$ y: optional label tensor to compute gradients - mode: enum value of [InfluenceType] - [pydvl.influence.twice_differentiable.InfluenceType] + mode: enum value of [InfluenceMode] + [pydvl.influence.base_influence_function_model.InfluenceMode] Returns: [dask.array.Array][dask.array.Array] representing the element-wise scalar @@ -547,13 +547,13 @@ class SequentialInfluenceCalculator: or distributed processing is not required or not feasible. 
The core functionality of this class is to apply a specified influence computation model, of type [InfluenceFunctionModel] - [pydvl.influence.base_influence_model.InfluenceFunctionModel], to batches of data + [pydvl.influence.base_influence_function_model.InfluenceFunctionModel], to batches of data one at a time. Args: influence_function_model: An instance of type [InfluenceFunctionModel] - [pydvl.influence.base_influence_model.InfluenceFunctionModel], that + [pydvl.influence.base_influence_function_model.InfluenceFunctionModel], that specifies the computation logic for influence on data chunks. Example: @@ -628,7 +628,6 @@ def _influences_gen( train_data_iterable: Iterable[Tuple[TensorType, TensorType]], mode: InfluenceMode, ) -> Generator[Generator[TensorType, None, None], None, None]: - for x_test, y_test in iter(test_data_iterable): yield ( self.influence_function_model.influences(x_test, y_test, x, y, mode) @@ -664,8 +663,8 @@ def influences( train_data_iterable: An iterable that returns tuples of tensors. Each tuple consists of a pair of tensors (x, y), representing input data and corresponding targets. - mode: enum value of [InfluenceType] - [pydvl.influence.base_influence_model.InfluenceType] + mode: enum value of [InfluenceMode] + [pydvl.influence.base_influence_function_model.InfluenceMode] Returns: A lazy data structure representing the chunks of the resulting tensor @@ -686,7 +685,6 @@ def _influences_from_factors_gen( train_data_iterable: Iterable[Tuple[TensorType, TensorType]], mode: InfluenceMode, ): - for z_test_factor in iter(z_test_factors): if isinstance(z_test_factor, list) or isinstance(z_test_factor, tuple): z_test_factor = z_test_factor[0] @@ -724,8 +722,8 @@ def influences_from_factors( train_data_iterable: An iterable that returns tuples of tensors. Each tuple consists of a pair of tensors (x, y), representing input data and corresponding targets. 
- mode: enum value of [InfluenceType] - [pydvl.influence.twice_differentiable.InfluenceType] + mode: enum value of [InfluenceMode] + [pydvl.influence.base_influence_function_model.InfluenceMode] Returns: A lazy data structure representing the chunks of the resulting tensor diff --git a/src/pydvl/influence/torch/__init__.py b/src/pydvl/influence/torch/__init__.py index 6caf74d92..3bbd9552c 100644 --- a/src/pydvl/influence/torch/__init__.py +++ b/src/pydvl/influence/torch/__init__.py @@ -4,4 +4,6 @@ DirectInfluence, EkfacInfluence, LissaInfluence, + NystroemSketchInfluence, ) +from .pre_conditioner import JacobiPreConditioner, NystroemPreConditioner diff --git a/src/pydvl/influence/torch/functional.py b/src/pydvl/influence/torch/functional.py index 544f0eff0..1028b6acd 100644 --- a/src/pydvl/influence/torch/functional.py +++ b/src/pydvl/influence/torch/functional.py @@ -28,10 +28,11 @@ import logging from dataclasses import dataclass from functools import partial -from typing import Callable, Dict, Optional, Tuple +from typing import Callable, Dict, Optional, Tuple, Union import torch from scipy.sparse.linalg import ArpackNoConvergence +from torch._C import _LinAlgError from torch.func import functional_call, grad, jvp, vjp from torch.utils.data import DataLoader @@ -47,8 +48,11 @@ "create_per_sample_mixed_derivative_function", "model_hessian_low_rank", "LowRankProductRepresentation", + "randomized_nystroem_approximation", + "model_hessian_nystroem_approximation", ] + logger = logging.getLogger(__name__) @@ -168,8 +172,10 @@ def create_empirical_loss_function( on a given dataset. 
If we denote the model parameters with \( \theta \), the resulting function approximates: - \[ f(\theta) = \frac{1}{N}\sum_{i=1}^N - \operatorname{loss}(y_i, \operatorname{model}(\theta, x_i)) \] + \[ + f(\theta) = \frac{1}{N}\sum_{i=1}^N + \operatorname{loss}(y_i, \operatorname{model}(\theta, x_i)) + \] for a loss function $\operatorname{loss}$ and a model $\operatorname{model}$ with model parameters $\theta$, where $N$ is the number of all elements provided @@ -259,36 +265,36 @@ def create_hvp_function( Hessian is to be computed. loss: A callable that takes the model's output and target as input and returns the scalar loss. - data_loader: A DataLoader instance that provides batches of data for calculating - the Hessian-vector product. Each batch from the DataLoader is assumed to - return a tuple where the first element - is the model's input and the second element is the target output. - precompute_grad: If True, the full data gradient is precomputed and kept - in memory, which can speed up the hessian vector product computation. - Set this to False, if you can't afford to keep an additional - parameter-sized vector in memory. - use_average: If True, the returned function uses batch-wise computation via - [batch_loss_function][pydvl.influence.torch.functional.batch_loss_function] + data_loader: A DataLoader instance that provides batches of data for + calculating the Hessian-vector product. Each batch from the + DataLoader is assumed to return a tuple where the first element is + the model's input and the second element is the target output. + precompute_grad: If `True`, the full data gradient is precomputed and + kept in memory, which can speed up the hessian vector product + computation. Set this to `False`, if you can't afford to keep the + full computation graph in memory. 
+ use_average: If `True`, the returned function uses batch-wise + computation via + [a batch loss function][pydvl.influence.torch.functional.create_batch_loss_function] and averages the results. - If False, the function uses backpropagation on the full - [empirical_loss_function] - [pydvl.influence.torch.functional.empirical_loss_function], - which is more accurate than averaging the batch hessians, - but probably has a way higher memory usage. + If `False`, the function uses backpropagation on the full + [empirical loss function] + [pydvl.influence.torch.functional.create_empirical_loss_function], + which is more accurate than averaging the batch hessians, but + probably has a way higher memory usage. reverse_only: Whether to use only reverse-mode autodiff or - both forward- and reverse-mode autodiff. - Ignored if precompute_grad is True. - track_gradients: Whether to track gradients for the resulting tensor of the - hessian vector products. + both forward- and reverse-mode autodiff. Ignored if + `precompute_grad` is `True`. + track_gradients: Whether to track gradients for the resulting tensor of + the Hessian-vector products. Returns: - A function that takes a single argument, a vector, and returns the product of - the Hessian of the `loss` function with respect to the `model`'s parameters - and the input vector. + A function that takes a single argument, a vector, and returns the + product of the Hessian of the `loss` function with respect to the + `model`'s parameters and the input vector. 
""" if precompute_grad: - model_params = {k: p for k, p in model.named_parameters() if p.requires_grad} if use_average: @@ -408,7 +414,7 @@ def hessian( if use_hessian_avg: n_samples = 0 - hessian = to_model_device( + hessian_mat = to_model_device( torch.zeros((n_parameters, n_parameters), dtype=model_dtype), model ) blf = create_batch_loss_function(model, loss) @@ -420,11 +426,11 @@ def flat_input_batch_loss_function( for x, y in iter(data_loader): n_samples += x.shape[0] - hessian += x.shape[0] * torch.func.hessian(flat_input_batch_loss_function)( - flat_params, to_model_device(x, model), to_model_device(y, model) - ) + hessian_mat += x.shape[0] * torch.func.hessian( + flat_input_batch_loss_function + )(flat_params, to_model_device(x, model), to_model_device(y, model)) - hessian /= n_samples + hessian_mat /= n_samples else: def flat_input_empirical_loss(p: torch.Tensor): @@ -432,11 +438,11 @@ def flat_input_empirical_loss(p: torch.Tensor): align_with_model(p, model) ) - hessian = torch.func.jacrev(torch.func.jacrev(flat_input_empirical_loss))( + hessian_mat = torch.func.jacrev(torch.func.jacrev(flat_input_empirical_loss))( flat_params ) - return hessian + return hessian_mat def create_per_sample_loss_function( @@ -772,6 +778,7 @@ def model_hessian_low_rank( tol: float = 1e-6, max_iter: Optional[int] = None, eigen_computation_on_gpu: bool = False, + precompute_grad: bool = False, ) -> LowRankProductRepresentation: r""" Calculates a low-rank approximation of the Hessian matrix of the model's @@ -807,6 +814,10 @@ def model_hessian_low_rank( small rank_estimate to fit your device's memory. If False, the eigen pair approximation is executed on the CPU by scipy wrapper to ARPACK. + precompute_grad: If True, the full data gradient is precomputed and kept + in memory, which can speed up the hessian vector product computation. + Set this to False, if you can't afford to keep the full computation graph + in memory. 
Returns: [LowRankProductRepresentation] @@ -814,7 +825,9 @@ def model_hessian_low_rank( instance that contains the top (up until rank_estimate) eigenvalues and corresponding eigenvectors of the Hessian. """ - raw_hvp = create_hvp_function(model, loss, training_data, use_average=True) + raw_hvp = create_hvp_function( + model, loss, training_data, use_average=True, precompute_grad=precompute_grad + ) n_params = sum([p.numel() for p in model.parameters() if p.requires_grad]) device = next(model.parameters()).device return lanzcos_low_rank_hessian_approx( @@ -828,3 +841,148 @@ def model_hessian_low_rank( device=device, eigen_computation_on_gpu=eigen_computation_on_gpu, ) + + +def randomized_nystroem_approximation( + mat_mat_prod: Union[torch.Tensor, Callable[[torch.Tensor], torch.Tensor]], + input_dim: int, + rank: int, + input_type: torch.dtype, + shift_func: Optional[Callable[[torch.Tensor], torch.Tensor]] = None, + mat_vec_device: torch.device = torch.device("cpu"), +) -> LowRankProductRepresentation: + r""" + Given a matrix vector product function (representing a symmetric positive definite + matrix $A$ ), computes a random Nyström low rank approximation of + $A$ in factored form, i.e. + + $$ A_{\text{nys}} = (A \Omega)(\Omega^T A \Omega)^{\dagger}(A \Omega)^T + = U \Sigma U^T $$ + + where $\Omega$ is a standard normal random matrix. + + Args: + mat_mat_prod: A callable representing the matrix vector product + input_dim: dimension of the input for the matrix vector product + input_type: data_type of inputs + rank: rank of the approximation + shift_func: optional function for computing the stabilizing shift in the + construction of the randomized nystroem approximation, defaults to + + $$ \sqrt{\operatorname{\text{input_dim}}} \cdot + \varepsilon(\operatorname{\text{input_type}}) \cdot \|A\Omega\|_2,$$ + + where $\varepsilon(\operatorname{\text{input_type}})$ is the value of the + machine precision corresponding to the data type. 
+ mat_vec_device: device where the matrix vector product has to be executed + + Returns: + object containing, $U$ and $\Sigma$ + """ + + if shift_func is None: + + def shift_func(x: torch.Tensor): + return ( + torch.sqrt(torch.as_tensor(input_dim)) + * torch.finfo(x.dtype).eps + * torch.linalg.norm(x) + ) + + _mat_mat_prod: Callable[[torch.Tensor], torch.Tensor] + + if isinstance(mat_mat_prod, torch.Tensor): + + def _mat_mat_prod(x: torch.Tensor): + return mat_mat_prod @ x + + else: + _mat_mat_prod = mat_mat_prod + + random_sample_matrix = torch.randn( + input_dim, rank, device=mat_vec_device, dtype=input_type + ) + random_sample_matrix, _ = torch.linalg.qr(random_sample_matrix) + + sketch_mat = _mat_mat_prod(random_sample_matrix) + + shift = shift_func(sketch_mat) + sketch_mat += shift * random_sample_matrix + cholesky_mat = torch.matmul(random_sample_matrix.t(), sketch_mat) + try: + triangular_mat = torch.linalg.cholesky(cholesky_mat) + except _LinAlgError as e: + logger.warning( + f"Encountered error in cholesky decomposition: {e}.\n " + f"Increasing shift by smallest eigenvalue and re-compute" + ) + eigen_vals, eigen_vectors = torch.linalg.eigh(cholesky_mat) + shift += torch.abs(torch.min(eigen_vals)) + eigen_vals += shift + triangular_mat = torch.linalg.cholesky( + torch.mm(eigen_vectors, torch.mm(torch.diag(eigen_vals), eigen_vectors.T)) + ) + + svd_input = torch.linalg.solve_triangular( + triangular_mat.t(), sketch_mat, upper=True, left=False + ) + left_singular_vecs, singular_vals, _ = torch.linalg.svd( + svd_input, full_matrices=False + ) + singular_vals = torch.clamp(singular_vals**2 - shift, min=0) + + return LowRankProductRepresentation(singular_vals, left_singular_vecs) + + +def model_hessian_nystroem_approximation( + model: torch.nn.Module, + loss: Callable[[torch.Tensor, torch.Tensor], torch.Tensor], + data_loader: DataLoader, + rank: int, + shift_func: Optional[Callable[[torch.Tensor], torch.Tensor]] = None, +) -> LowRankProductRepresentation: + 
r""" + Given a model, loss and a data_loader, computes a random Nyström low rank approximation of + the corresponding Hessian matrix in factored form, i.e. + + $$ H_{\text{nys}} = (H \Omega)(\Omega^T H \Omega)^{+}(H \Omega)^T + = U \Sigma U^T $$ + + Args: + model: A PyTorch model instance. The Hessian will be calculated with respect to + this model's parameters. + loss : A callable that computes the loss. + data_loader: A DataLoader instance that provides the model's training data. + Used in calculating the Hessian-vector products. + rank: rank of the approximation + shift_func: optional function for computing the stabilizing shift in the + construction of the randomized nystroem approximation, defaults to + + $$ \sqrt{\operatorname{\text{input_dim}}} \cdot + \varepsilon(\operatorname{\text{input_type}}) \cdot \|A\Omega\|_2,$$ + + where $\varepsilon(\operatorname{\text{input_type}})$ is the value of the + machine precision corresponding to the data type. + + Returns: + object containing, $U$ and $\Sigma$ + """ + + model_hvp = create_hvp_function( + model, loss, data_loader, precompute_grad=False, use_average=True + ) + device = next((p.device for p in model.parameters())) + dtype = next((p.dtype for p in model.parameters())) + in_dim = sum((p.numel() for p in model.parameters() if p.requires_grad)) + + def model_hessian_mat_mat_prod(x: torch.Tensor): + return torch.func.vmap(model_hvp, in_dims=1, randomness="same")(x).t() + + return randomized_nystroem_approximation( + model_hessian_mat_mat_prod, + in_dim, + rank, + dtype, + shift_func=shift_func, + mat_vec_device=device, + ) diff --git a/src/pydvl/influence/torch/influence_function_model.py b/src/pydvl/influence/torch/influence_function_model.py index 287291032..f85c0d4f0 100644 --- a/src/pydvl/influence/torch/influence_function_model.py +++ b/src/pydvl/influence/torch/influence_function_model.py @@ -32,7 +32,9 @@ create_per_sample_mixed_derivative_function, hessian, model_hessian_low_rank, + 
model_hessian_nystroem_approximation, ) +from .pre_conditioner import PreConditioner from .util import ( EkfacRepresentation, empirical_cross_entropy_loss_fn, @@ -65,8 +67,15 @@ def __init__( self._model_params = { k: p.detach() for k, p in self.model.named_parameters() if p.requires_grad } + self._model_dtype = next( + (p.dtype for p in model.parameters() if p.requires_grad) + ) super().__init__() + @property + def model_dtype(self): + return self._model_dtype + @property def n_parameters(self): return self._n_parameters @@ -111,31 +120,33 @@ def influences( Compute the approximation of \[ - \langle H^{-1}\nabla_{theta} \ell(y_{\text{test}}, f_{\theta}(x_{\text{test}})), - \nabla_{\theta} \ell(y, f_{\theta}(x)) \rangle + \langle H^{-1}\nabla_{\theta} \ell(y_{\text{test}}, + f_{\theta}(x_{\text{test}})), \nabla_{\theta} \ell(y, f_{\theta}(x))\rangle \] for the case of up-weighting influence, resp. \[ - \langle H^{-1}\nabla_{theta} \ell(y_{\text{test}}, f_{\theta}(x_{\text{test}})), + \langle H^{-1}\nabla_{\theta} \ell(y_{\text{test}}, f_{\theta}(x_{\text{test}})), \nabla_{x} \nabla_{\theta} \ell(y, f_{\theta}(x)) \rangle \] - for the perturbation type influence case. + for the perturbation type influence case. For all input tensors it is assumed, + that the first dimension is the batch dimension (in case, you want to provide + a single sample z, call z.unsqueeze(0) if no batch dimension is present). Args: x_test: model input to use in the gradient computations - of $H^{-1}\nabla_{theta} \ell(y_{\text{test}}, + of $H^{-1}\nabla_{\theta} \ell(y_{\text{test}}, f_{\theta}(x_{\text{test}}))$ y_test: label tensor to compute gradients x: optional model input to use in the gradient computations - $\nabla_{theta}\ell(y, f_{\theta}(x))$, - resp. $\nabla_{x}\nabla_{theta}\ell(y, f_{\theta}(x))$, + $\nabla_{\theta}\ell(y, f_{\theta}(x))$, + resp. 
$\nabla_{x}\nabla_{\theta}\ell(y, f_{\theta}(x))$, if None, use $x=x_{\text{test}}$ y: optional label tensor to compute gradients - mode: enum value of [InfluenceType] - [pydvl.influence.base_influence_model.InfluenceType] + mode: enum value of [InfluenceMode] + [pydvl.influence.base_influence_function_model.InfluenceMode] Returns: Tensor representing the element-wise scalar products for the provided batch @@ -152,14 +163,12 @@ def _influences( y: Optional[torch.Tensor] = None, mode: InfluenceMode = InfluenceMode.Up, ) -> torch.Tensor: - if not self.is_fitted: raise ValueError( "Instance must be fitted before calling influence methods on it" ) if x is None: - if y is not None: raise ValueError( "Providing labels y, without providing model input x " @@ -212,7 +221,6 @@ def _non_symmetric_values( def _symmetric_values( self, x: torch.Tensor, y: torch.Tensor, mode: InfluenceMode ) -> torch.Tensor: - grad = self._loss_grad(x, y) fac = self._solve_hvp(grad) @@ -231,6 +239,9 @@ def influence_factors(self, x: torch.Tensor, y: torch.Tensor) -> torch.Tensor: \[ H^{-1}\nabla_{\theta} \ell(y, f_{\theta}(x)) \] where the gradient is meant to be per sample of the batch $(x, y)$. + For all input tensors it is assumed, + that the first dimension is the batch dimension (in case, you want to provide + a single sample z, call z.unsqueeze(0) if no batch dimension is present). Args: x: model input to use in the gradient computations @@ -243,7 +254,6 @@ def influence_factors(self, x: torch.Tensor, y: torch.Tensor) -> torch.Tensor: return super().influence_factors(x, y) def _influence_factors(self, x: torch.Tensor, y: torch.Tensor) -> torch.Tensor: - if not self.is_fitted: raise ValueError( "Instance must be fitted before calling influence methods on it" @@ -272,18 +282,20 @@ def influences_from_factors( \nabla_{x} \nabla_{\theta} \ell(y, f_{\theta}(x)) \rangle \] for the perturbation type influence case. The gradient is meant to be per sample - of the batch $(x, y)$. 
+ of the batch $(x, y)$. For all input tensors it is assumed, + that the first dimension is the batch dimension (in case, you want to provide + a single sample z, call z.unsqueeze(0) if no batch dimension is present). Args: - z_test_factors: pre-computed tensor, approximating + z_test_factors: pre-computed tensor, approximating $H^{-1}\nabla_{\theta} \ell(y_{\text{test}}, - f_{\theta}(x_{\text{test}}))$ - x: model input to use in the gradient computations + f_{\theta}(x_{\text{test}}))$ + x: model input to use in the gradient computations $\nabla_{\theta}\ell(y, f_{\theta}(x))$, resp. $\nabla_{x}\nabla_{\theta}\ell(y, f_{\theta}(x))$ - y: label tensor to compute gradients - mode: enum value of [InfluenceType] - [pydvl.influence.twice_differentiable.InfluenceType] + y: label tensor to compute gradients + mode: enum value of [InfluenceMode] + [pydvl.influence.base_influence_function_model.InfluenceMode] Returns: Tensor representing the element-wise scalar products for the provided batch @@ -326,7 +338,10 @@ class DirectInfluence(TorchInfluenceFunctionModel): with \(H\) being the model hessian. Args: - model: instance of [torch.nn.Module][torch.nn.Module]. + model: A PyTorch model. The Hessian will be calculated with respect to + this model's parameters. + loss: A callable that takes the model's output and target as input and returns + the scalar loss. hessian_regularization: Regularization of the hessian. """ @@ -350,14 +365,13 @@ def is_fitted(self): def fit(self, data: DataLoader) -> DirectInfluence: """ - Compute the hessian matrix based on a provided dataloader + Compute the hessian matrix based on a provided dataloader. Args: - data: Instance of [torch.utils.data.Dataloader] - [torch.utils.data.Dataloader] + data: The data to compute the Hessian with. Returns: - The fitted instance + The fitted instance. """ self.hessian = hessian(self.model, self.loss, data) return self @@ -397,12 +411,12 @@ def influences( resp. 
$\nabla_{x}\nabla_{\theta}\ell(y, f_{\theta}(x))$, if None, use $x=x_{\text{test}}$ y: optional label tensor to compute gradients - mode: enum value of [InfluenceType] - [pydvl.influence.base_influence_model.InfluenceType] + mode: enum value of [InfluenceMode] + [pydvl.influence.base_influence_function_model.InfluenceMode] Returns: - [torch.nn.Tensor][torch.nn.Tensor] representing the element-wise - scalar products for the provided batch. + A tensor representing the element-wise scalar products for the + provided batch. """ return super().influences(x_test, y_test, x, y, mode=mode) @@ -430,15 +444,25 @@ class CgInfluence(TorchInfluenceFunctionModel): [Conjugate Gradient][conjugate-gradient]. Args: - model: Instance of [torch.nn.Module][torch.nn.Module]. + model: A PyTorch model. The Hessian will be calculated with respect to + this model's parameters. loss: A callable that takes the model's output and target as input and returns the scalar loss. - hessian_regularization: Regularization of the hessian. + hessian_regularization: Optional regularization parameter added + to the Hessian-vector product for numerical stability. x0: Initial guess for hvp. If None, defaults to b. rtol: Maximum relative tolerance of result. atol: Absolute tolerance of result. maxiter: Maximum number of iterations. If None, defaults to 10*len(b). progress: If True, display progress bars. + precompute_grad: If True, the full data gradient is precomputed and kept + in memory, which can speed up the hessian vector product computation. + Set this to False, if you can't afford to keep the full computation graph + in memory. 
+ pre_conditioner: Optional pre-conditioner to improve convergence of conjugate + gradient method + use_block_cg: If True, use block variant of conjugate gradient method, which + solves several right hand sides simultaneously """ @@ -452,8 +476,14 @@ def __init__( atol: float = 1e-7, maxiter: Optional[int] = None, progress: bool = False, + precompute_grad: bool = False, + pre_conditioner: Optional[PreConditioner] = None, + use_block_cg: bool = False, ): super().__init__(model, loss) + self.use_block_cg = use_block_cg + self.pre_conditioner = pre_conditioner + self.precompute_grad = precompute_grad self.progress = progress self.maxiter = maxiter self.atol = atol @@ -472,6 +502,24 @@ def is_fitted(self): def fit(self, data: DataLoader) -> CgInfluence: self.train_dataloader = data + if self.pre_conditioner is not None: + hvp = create_hvp_function( + self.model, + self.loss, + self.train_dataloader, + precompute_grad=self.precompute_grad, + ) + + def model_hessian_mat_mat_prod(x: torch.Tensor): + return torch.func.vmap(hvp, in_dims=1, randomness="same")(x).t() + + self.pre_conditioner.fit( + model_hessian_mat_mat_prod, + self.n_parameters, + self.model_dtype, + self.model_device, + self.hessian_regularization, + ) return self @log_duration @@ -484,7 +532,7 @@ def influences( mode: InfluenceMode = InfluenceMode.Up, ) -> torch.Tensor: r""" - Compute approximation of + Compute an approximation of \[ \langle H^{-1}\nabla_{\theta} \ell(y_{\text{test}}, f_{\theta}(x_{\text{test}})), @@ -496,9 +544,9 @@ def influences( f_{\theta}(x_{\text{test}})), \nabla_{x} \nabla_{\theta} \ell(y, f_{\theta}(x)) \rangle \] - for the perturbation type influence case. The approximate action of $H^{-1}$ - is achieved via the [conjugate gradient method] - (https://en.wikipedia.org/wiki/Conjugate_gradient_method). + for the case of perturbation-type influence. 
The approximate action of + $H^{-1}$ is achieved via the [conjugate gradient + method](https://en.wikipedia.org/wiki/Conjugate_gradient_method). Args: x_test: model input to use in the gradient computations of @@ -510,12 +558,12 @@ def influences( resp. $\nabla_{x}\nabla_{\theta}\ell(y, f_{\theta}(x))$, if None, use $x=x_{\text{test}}$ y: optional label tensor to compute gradients - mode: enum value of [InfluenceType] - [pydvl.influence.base_influence_model.InfluenceType] + mode: enum value of [InfluenceMode] + [pydvl.influence.base_influence_function_model.InfluenceMode] Returns: - [torch.nn.Tensor][torch.nn.Tensor] representing the element-wise - scalar products for the provided batch. + A tensor representing the element-wise scalar products for the + provided batch. """ return super().influences(x_test, y_test, x, y, mode=mode) @@ -525,35 +573,50 @@ def _solve_hvp(self, rhs: torch.Tensor) -> torch.Tensor: if len(self.train_dataloader) == 0: raise ValueError("Training dataloader must not be empty.") - hvp = create_hvp_function(self.model, self.loss, self.train_dataloader) + if self.use_block_cg: + return self._solve_pbcg(rhs) + + hvp = create_hvp_function( + self.model, + self.loss, + self.train_dataloader, + precompute_grad=self.precompute_grad, + ) def reg_hvp(v: torch.Tensor): return hvp(v) + self.hessian_regularization * v.type(rhs.dtype) + y_norm = torch.linalg.norm(rhs, dim=0) + + stopping_val = torch.clamp(self.rtol**2 * y_norm, min=self.atol**2) + batch_cg = torch.zeros_like(rhs) - for idx, bi in enumerate( - tqdm(rhs, disable=not self.progress, desc="Conjugate gradient") + for idx, (bi, _tol) in enumerate( + tqdm( + zip(rhs, stopping_val), + disable=not self.progress, + desc="Conjugate gradient", + ) ): - batch_result = self._solve_cg( + batch_result = self._solve_pcg( reg_hvp, bi, + tol=_tol, x0=self.x0, - rtol=self.rtol, - atol=self.atol, maxiter=self.maxiter, ) batch_cg[idx] = batch_result + return batch_cg - @staticmethod - def _solve_cg( + def 
_solve_pcg( + self, hvp: Callable[[torch.Tensor], torch.Tensor], b: torch.Tensor, *, + tol: float, x0: Optional[torch.Tensor] = None, - rtol: float = 1e-7, - atol: float = 1e-7, maxiter: Optional[int] = None, ) -> torch.Tensor: r""" @@ -568,7 +631,7 @@ def _solve_cg( maxiter: Maximum number of iterations. If None, defaults to 10*len(b). Returns: - [torch.nn.Tensor][torch.nn.Tensor] representing the solution of \(Ax=b\). + A tensor with the solution of \(Ax=b\). """ if x0 is None: @@ -576,27 +639,123 @@ def _solve_cg( if maxiter is None: maxiter = len(b) * 10 - y_norm = torch.sum(torch.matmul(b, b)).item() - stopping_val = max([rtol**2 * y_norm, atol**2]) - x = x0 - p = r = (b - hvp(x)).squeeze() - gamma = torch.sum(torch.matmul(r, r)).item() + + r0 = b - hvp(x) + + if self.pre_conditioner is not None: + p = z0 = self.pre_conditioner.solve(r0) + else: + p = z0 = r0 for k in range(maxiter): - if gamma < stopping_val: + if torch.norm(r0) < tol: break - Ap = hvp(p).squeeze() - alpha = gamma / torch.sum(torch.matmul(p, Ap)).item() + Ap = hvp(p) + alpha = torch.dot(r0, z0) / torch.dot(p, Ap) x += alpha * p - r -= alpha * Ap - gamma_ = torch.sum(torch.matmul(r, r)).item() - beta = gamma_ / gamma - gamma = gamma_ - p = r + beta * p + r = r0 - alpha * Ap + + if self.pre_conditioner is not None: + z = self.pre_conditioner.solve(r) + else: + z = r + + beta = torch.dot(r, z) / torch.dot(r0, z0) + + r0 = r + p = z + beta * p + z0 = z return x + def _solve_pbcg( + self, + rhs: torch.Tensor, + ): + hvp = create_hvp_function( + self.model, + self.loss, + self.train_dataloader, + precompute_grad=self.precompute_grad, + ) + + # The block variant of conjugate gradient is known to suffer from breakdown, + # due to the possibility of rank deficiency of the iterates of the parameter + # matrix P^tAP, which destabilizes the direct solver. + # The paper `Randomized Nyström Preconditioning, + # Frangella, Zachary and Tropp, Joel A. and Udell, Madeleine, + # SIAM J. Matrix Anal. 
Appl., 2023` + # proposes a simple orthogonalization pre-processing. However, we observed, that + # this stabilization only worked for double precision. We thus implement + # a different stabilization strategy described in + # `A breakdown-free block conjugate gradient method, Ji, Hao and Li, Yaohang, + # BIT Numerical Mathematics, 2017` + + def mat_mat(x: torch.Tensor): + return torch.vmap( + lambda u: hvp(u) + self.hessian_regularization * u, + in_dims=1, + randomness="same", + )(x) + + X = torch.clone(rhs.T) + + R = (rhs - mat_mat(X)).T + Z = R if self.pre_conditioner is None else self.pre_conditioner.solve(R) + P, _, _ = torch.linalg.svd(Z, full_matrices=False) + active_indices = torch.as_tensor(list(range(X.shape[-1])), dtype=torch.long) + + maxiter = self.maxiter if self.maxiter is not None else len(rhs) * 10 + y_norm = torch.linalg.norm(rhs, dim=1) + tol = torch.clamp(self.rtol**2 * y_norm, min=self.atol**2) + + # In the case the parameter dimension is smaller than the number of right + # hand sides, we do not shrink the indices due to resulting wrong + # dimensionality of the svd decomposition. 
We consider this an edge case, which + # does not need optimization + shrink_finished_indices = rhs.shape[0] <= rhs.shape[1] + + for k in range(maxiter): + Q = mat_mat(P).T + p_t_ap = P.T @ Q + alpha = torch.linalg.solve(p_t_ap, P.T @ R) + X[:, active_indices] += P @ alpha + R -= Q @ alpha + + B = torch.linalg.norm(R, dim=0) + non_finished_indices = torch.nonzero(B > tol) + num_remaining_indices = non_finished_indices.numel() + non_finished_indices = non_finished_indices.squeeze() + + if num_remaining_indices == 1: + non_finished_indices = non_finished_indices.unsqueeze(-1) + + if num_remaining_indices == 0: + break + + # Reduce problem size by removing finished columns from the iteration + if shrink_finished_indices: + active_indices = active_indices[non_finished_indices] + R = R[:, non_finished_indices] + P = P[:, non_finished_indices] + Q = Q[:, non_finished_indices] + p_t_ap = p_t_ap[:, non_finished_indices][non_finished_indices, :] + tol = tol[non_finished_indices] + + Z = R if self.pre_conditioner is None else self.pre_conditioner.solve(R) + beta = -torch.linalg.solve(p_t_ap, Q.T @ Z) + Z_tmp = Z + P @ beta + + if Z_tmp.ndim == 1: + Z_tmp = Z_tmp.unsqueeze(-1) + + # Orthogonalization search directions to stabilize the action of + # (P^tAP)^{-1} + P, _, _ = torch.linalg.svd(Z_tmp, full_matrices=False) + + return X.T + class LissaInfluence(TorchInfluenceFunctionModel): r""" @@ -613,8 +772,12 @@ class LissaInfluence(TorchInfluenceFunctionModel): [linear-time-stochastic-second-order-approximation-lissa] Args: - model: instance of [torch.nn.Module][torch.nn.Module]. - hessian_regularization: Regularization of the hessian. + model: A PyTorch model. The Hessian will be calculated with respect to + this model's parameters. + loss: A callable that takes the model's output and target as input and returns + the scalar loss. + hessian_regularization: Optional regularization parameter added + to the Hessian-vector product for numerical stability. 
maxiter: Maximum number of iterations. dampen: Dampening factor, defaults to 0 for no dampening. scale: Scaling factor, defaults to 10. @@ -659,7 +822,6 @@ def fit(self, data: DataLoader) -> LissaInfluence: @log_duration def _solve_hvp(self, rhs: torch.Tensor) -> torch.Tensor: - h_estimate = self.h0 if self.h0 is not None else torch.clone(rhs) shuffled_training_data = DataLoader( @@ -692,7 +854,8 @@ def lissa_step( ) for _ in tqdm(range(self.maxiter), disable=not self.progress, desc="Lissa"): x, y = next(iter(shuffled_training_data)) - # grad_xy = model.grad(x, y, create_graph=True) + x = x.to(self.model_device) + y = y.to(self.model_device) reg_hvp = ( lambda v: b_hvp(model_params, x, y, v) + self.hessian_regularization * v ) @@ -729,8 +892,10 @@ class ArnoldiInfluence(TorchInfluenceFunctionModel): For more information, see [Arnoldi][arnoldi]. Args: - model: Instance of [torch.nn.Module][torch.nn.Module]. - The Hessian will be calculated with respect to this model's parameters. + model: A PyTorch model. The Hessian will be calculated with respect to + this model's parameters. + loss: A callable that takes the model's output and target as input and returns + the scalar loss. hessian_regularization: Optional regularization parameter added to the Hessian-vector product for numerical stability. rank_estimate: The number of eigenvalues and corresponding eigenvectors @@ -748,21 +913,25 @@ class ArnoldiInfluence(TorchInfluenceFunctionModel): is appropriate for device memory. If False, the eigen pair approximation is executed on the CPU by the scipy wrapper to ARPACK. + precompute_grad: If True, the full data gradient is precomputed and kept + in memory, which can speed up the hessian vector product computation. + Set this to False, if you can't afford to keep the full computation graph + in memory. 
""" low_rank_representation: LowRankProductRepresentation def __init__( self, - model, - loss, + model: nn.Module, + loss: Callable[[torch.Tensor, torch.Tensor], torch.Tensor], hessian_regularization: float = 0.0, rank_estimate: int = 10, krylov_dimension: Optional[int] = None, tol: float = 1e-6, max_iter: Optional[int] = None, eigen_computation_on_gpu: bool = False, + precompute_grad: bool = False, ): - super().__init__(model, loss) self.hessian_regularization = hessian_regularization self.rank_estimate = rank_estimate @@ -770,6 +939,7 @@ def __init__( self.max_iter = max_iter self.krylov_dimension = krylov_dimension self.eigen_computation_on_gpu = eigen_computation_on_gpu + self.precompute_grad = precompute_grad @property def is_fitted(self): @@ -787,10 +957,10 @@ def fit(self, data: DataLoader) -> ArnoldiInfluence: of the Hessian defined by the provided data loader. Args: - data: Instance of [torch.utils.data.Dataloader][torch.utils.data.Dataloader] + data: The data to compute the Hessian with. Returns: - The fitted instance + The fitted instance. 
""" low_rank_representation = model_hessian_low_rank( @@ -803,6 +973,7 @@ def fit(self, data: DataLoader) -> ArnoldiInfluence: tol=self.tol, max_iter=self.max_iter, eigen_computation_on_gpu=self.eigen_computation_on_gpu, + precompute_grad=self.precompute_grad, ) self.low_rank_representation = low_rank_representation.to(self.model_device) return self @@ -815,20 +986,19 @@ def _non_symmetric_values( y: torch.Tensor, mode: InfluenceMode = InfluenceMode.Up, ) -> torch.Tensor: - if mode == InfluenceMode.Up: mjp = create_matrix_jacobian_product_function( self.model, self.loss, self.low_rank_representation.projections.T ) left = mjp(self.model_params, x_test, y_test) - regularized_eigenvalues = ( + inverse_regularized_eigenvalues = 1.0 / ( self.low_rank_representation.eigen_vals + self.hessian_regularization ) - right = torch.diag_embed(1.0 / regularized_eigenvalues) @ mjp( + right = mjp( self.model_params, x, y - ) + ) * inverse_regularized_eigenvalues.unsqueeze(-1) values = torch.einsum("ij, ik -> jk", left, right) elif mode == InfluenceMode.Perturbation: factors = self.influence_factors(x_test, y_test) @@ -840,15 +1010,14 @@ def _non_symmetric_values( def _symmetric_values( self, x: torch.Tensor, y: torch.Tensor, mode: InfluenceMode ) -> torch.Tensor: - if mode == InfluenceMode.Up: left = create_matrix_jacobian_product_function( self.model, self.loss, self.low_rank_representation.projections.T )(self.model_params, x, y) - regularized_eigenvalues = ( + inverse_regularized_eigenvalues = 1.0 / ( self.low_rank_representation.eigen_vals + self.hessian_regularization ) - right = torch.diag_embed(1.0 / regularized_eigenvalues) @ left + right = left * inverse_regularized_eigenvalues.unsqueeze(-1) values = torch.einsum("ij, ik -> jk", left, right) elif mode == InfluenceMode.Perturbation: factors = self.influence_factors(x, y) @@ -859,14 +1028,13 @@ def _symmetric_values( @log_duration def _solve_hvp(self, rhs: torch.Tensor) -> torch.Tensor: - - regularized_eigenvalues = ( + 
inverse_regularized_eigenvalues = 1.0 / ( self.low_rank_representation.eigen_vals + self.hessian_regularization ) + projected_rhs = self.low_rank_representation.projections.t() @ rhs.t() result = self.low_rank_representation.projections @ ( - torch.diag_embed(1.0 / regularized_eigenvalues) - @ (self.low_rank_representation.projections.t() @ rhs.t()) + projected_rhs * inverse_regularized_eigenvalues.unsqueeze(-1) ) return result.t() @@ -879,17 +1047,21 @@ def to(self, device: torch.device): class EkfacInfluence(TorchInfluenceFunctionModel): r""" - Approximately solves the linear system Hx = b, where H is the Hessian of a model with the empirical - categorical cross entropy as loss function and b is the given right-hand side vector. - It employs the EK-FAC method [@george2018fast], which is based on the kronecker - factorization of the Hessian first introduced in [@martens2015optimizing]. + Approximately solves the linear system Hx = b, where H is the Hessian of a model + with the empirical categorical cross entropy as loss function and b is the given + right-hand side vector. + It employs the EK-FAC method, which is based on the kronecker + factorization of the Hessian. + Contrary to the other influence function methods, this implementation can only be used for classification tasks with a cross entropy loss function. However, it is much faster than the other methods and can be used efficiently for very large - datasets and models. For more information, see [Eigenvalue Corrected K-FAC][ekfac]. + datasets and models. For more information, + see [Eigenvalue Corrected K-FAC][eigenvalue-corrected-k-fac]. Args: - model: Instance of [torch.nn.Module][torch.nn.Module]. + model: A PyTorch model. The Hessian will be calculated with respect to + this model's parameters. update_diagonal: If True, the diagonal values in the ekfac representation are refitted from the training data after calculating the KFAC blocks. 
This provides a more accurate approximation of the Hessian, but it is @@ -907,7 +1079,6 @@ def __init__( hessian_regularization: float = 0.0, progress: bool = False, ): - super().__init__(model, torch.nn.functional.cross_entropy) self.hessian_regularization = hessian_regularization self.update_diagonal = update_diagonal @@ -1223,7 +1394,7 @@ def influences_by_layer( y: Optional[torch.Tensor] = None, mode: InfluenceMode = InfluenceMode.Up, ) -> Dict[str, torch.Tensor]: - """ + r""" Compute the influence of the data on the test data for each layer of the model. Args: @@ -1236,8 +1407,8 @@ def influences_by_layer( resp. $\nabla_{x}\nabla_{\theta}\ell(y, f_{\theta}(x))$, if None, use $x=x_{\text{test}}$ y: optional label tensor to compute gradients - mode: enum value of [InfluenceType] - [pydvl.influence.base_influence_model.InfluenceType] + mode: enum value of [InfluenceMode] + [pydvl.influence.base_influence_function_model.InfluenceMode] Returns: A dictionary containing the influence of the data on the test data for each @@ -1249,7 +1420,6 @@ def influences_by_layer( ) if x is None: - if y is not None: raise ValueError( "Providing labels y, without providing model input x " @@ -1280,10 +1450,10 @@ def influence_factors_by_layer( x: torch.Tensor, y: torch.Tensor, ) -> Dict[str, torch.Tensor]: - """ + r""" Computes the approximation of - \[H^{-1}\nabla_{\theta} \ell(y, f_{\theta}(x))\] + \[ H^{-1}\nabla_{\theta} \ell(y, f_{\theta}(x)) \] for each layer of the model separately. @@ -1313,7 +1483,7 @@ def influences_from_factors_by_layer( y: torch.Tensor, mode: InfluenceMode = InfluenceMode.Up, ) -> Dict[str, torch.Tensor]: - """ + r""" Computation of \[ \langle z_{\text{test_factors}}, @@ -1324,23 +1494,24 @@ def influences_from_factors_by_layer( \[ \langle z_{\text{test_factors}}, \nabla_{x} \nabla_{\theta} \ell(y, f_{\theta}(x)) \rangle \] - for the perturbation type influence case for each layer of the model separately. 
- The gradients are meant to be per sample of the batch $(x, y)$. + for the perturbation type influence case for each layer of the model + separately. The gradients are meant to be per sample of the batch $(x, + y)$. Args: z_test_factors: pre-computed tensor, approximating $H^{-1}\nabla_{\theta} \ell(y_{\text{test}}, - f_{\theta}(x_{\text{test}}))$ - x: model input to use in the gradient computations + f_{\theta}(x_{\text{test}}))$ + x: model input to use in the gradient computations $\nabla_{\theta}\ell(y, f_{\theta}(x))$, resp. $\nabla_{x}\nabla_{\theta}\ell(y, f_{\theta}(x))$ - y: label tensor to compute gradients - mode: enum value of [InfluenceType] - [pydvl.influence.twice_differentiable.InfluenceType] + y: label tensor to compute gradients + mode: enum value of [InfluenceMode] + [pydvl.influence.base_influence_function_model.InfluenceMode] Returns: - A dictionary containing the influence of the data on the test data for each - layer of the model, with the layer name as key. + A dictionary containing the influence of the data on the test data + for each layer of the model, with the layer name as key. """ if mode == InfluenceMode.Up: total_grad = self._loss_grad( @@ -1380,9 +1551,9 @@ def _non_symmetric_values_by_layer( mode: InfluenceMode = InfluenceMode.Up, ) -> Dict[str, torch.Tensor]: """ - Similar to _non_symmetric_values, but computes the influence for each layer - separately. Returns a dictionary containing the influence for each layer, - with the layer name as key. + Similar to `_non_symmetric_values`, but computes the influence for each + layer separately. Returns a dictionary containing the influence for each + layer, with the layer name as key. 
""" if mode == InfluenceMode.Up: if x_test.shape[0] <= x.shape[0]: @@ -1404,7 +1575,7 @@ def _symmetric_values_by_layer( self, x: torch.Tensor, y: torch.Tensor, mode: InfluenceMode ) -> Dict[str, torch.Tensor]: """ - Similar to _symmetric_values, but computes the influence for each layer + Similar to `_symmetric_values`, but computes the influence for each layer separately. Returns a dictionary containing the influence for each layer, with the layer name as key. """ @@ -1469,3 +1640,80 @@ def to(self, device: torch.device): if self.is_fitted: self.ekfac_representation.to(device) return super().to(device) + + +class NystroemSketchInfluence(TorchInfluenceFunctionModel): + r""" + Given a model and training data, it uses a low-rank approximation of the Hessian + (derived via random projection Nyström approximation) in combination with + the [Sherman–Morrison–Woodbury + formula](https://en.wikipedia.org/wiki/Woodbury_matrix_identity) to + calculate the inverse of the Hessian Vector Product. More concrete, it + computes a low-rank approximation + + \begin{align*} + H_{\text{nys}} &= (H\Omega)(\Omega^TH\Omega)^{+}(H\Omega)^T \\\ + &= U \Lambda U^T + \end{align*} + + in factorized form and approximates the action of the inverse Hessian via + + \[ (H_{\text{nys}} + \lambda I)^{-1} = U(\Lambda+\lambda I)U^T + + \frac{1}{\lambda}(I−UU^T). \] + + Args: + model: A PyTorch model. The Hessian will be calculated with respect to + this model's parameters. + loss: A callable that takes the model's output and target as input and returns + the scalar loss. + hessian_regularization: Optional regularization parameter added + to the Hessian-vector product for numerical stability. 
+ rank: rank of the low-rank approximation + + """ + + low_rank_representation: LowRankProductRepresentation + + def __init__( + self, + model: torch.nn.Module, + loss: Callable[[torch.Tensor, torch.Tensor], torch.Tensor], + hessian_regularization: float, + rank: int, + ): + super().__init__(model, loss) + self.hessian_regularization = hessian_regularization + self.rank = rank + + def _solve_hvp(self, rhs: torch.Tensor) -> torch.Tensor: + regularized_eigenvalues = ( + self.low_rank_representation.eigen_vals + self.hessian_regularization + ) + + proj_rhs = self.low_rank_representation.projections.t() @ rhs.t() + inverse_regularized_eigenvalues = 1.0 / regularized_eigenvalues + result = self.low_rank_representation.projections @ ( + proj_rhs * inverse_regularized_eigenvalues.unsqueeze(-1) + ) + + if self.hessian_regularization > 0.0: + result += ( + 1.0 + / self.hessian_regularization + * (rhs.t() - self.low_rank_representation.projections @ proj_rhs) + ) + + return result.t() + + @property + def is_fitted(self): + try: + return self.low_rank_representation is not None + except AttributeError: + return False + + def fit(self, data: DataLoader): + self.low_rank_representation = model_hessian_nystroem_approximation( + self.model, self.loss, data, self.rank + ) + return self diff --git a/src/pydvl/influence/torch/pre_conditioner.py b/src/pydvl/influence/torch/pre_conditioner.py new file mode 100644 index 000000000..4497d81c2 --- /dev/null +++ b/src/pydvl/influence/torch/pre_conditioner.py @@ -0,0 +1,235 @@ +from abc import ABC, abstractmethod +from typing import Callable, Optional + +import torch + +from ..base_influence_function_model import NotFittedException +from .functional import LowRankProductRepresentation, randomized_nystroem_approximation + +__all__ = ["JacobiPreConditioner", "NystroemPreConditioner", "PreConditioner"] + + +class PreConditioner(ABC): + r""" + Abstract base class for implementing pre-conditioners for improving the convergence + of CG for 
systems of the form + + \[ ( A + \lambda \operatorname{I})x = \operatorname{rhs} \] + + i.e. a matrix $M$ such that $M^{-1}(A + \lambda \operatorname{I})$ has a better + condition number than $A + \lambda \operatorname{I}$. + + """ + + @property + @abstractmethod + def is_fitted(self): + pass + + @abstractmethod + def fit( + self, + mat_mat_prod: Callable[[torch.Tensor], torch.Tensor], + size: int, + dtype: torch.dtype, + device: torch.device, + regularization: float = 0.0, + ): + r""" + Implement this to fit the pre-conditioner to the matrix represented by the + mat_mat_prod + Args: + mat_mat_prod: a callable that computes the matrix-matrix product + size: size of the matrix represented by `mat_mat_prod` + dtype: data type of the matrix represented by `mat_mat_prod` + device: device of the matrix represented by `mat_mat_prod` + regularization: regularization parameter $\lambda$ in the equation + $ ( A + \lambda \operatorname{I})x = \operatorname{rhs} $ + Returns: + self + """ + pass + + def solve(self, rhs: torch.Tensor): + r""" + Solve the equation $M@Z = \operatorname{rhs}$ + Args: + rhs: right hand side of the equation, corresponds to the residuum vector + (or matrix) in the conjugate gradient method + + Returns: + solution $M^{-1}\operatorname{rhs}$ + + """ + if not self.is_fitted: + raise NotFittedException(type(self)) + + return self._solve(rhs) + + @abstractmethod + def _solve(self, rhs: torch.Tensor): + pass + + +class JacobiPreConditioner(PreConditioner): + r""" + Pre-conditioner for improving the convergence of CG for systems of the form + + $$ ( A + \lambda \operatorname{I})x = \operatorname{rhs} $$ + + The JacobiPreConditioner uses the diagonal information of the matrix $A$. + The diagonal elements are not computed directly but estimated via Hutchinson's + estimator. + + $$ M = \frac{1}{m} \sum_{i=1}^m u_i \odot Au_i + \lambda \operatorname{I} $$ + + where $u_i$ are i.i.d. Gaussian random vectors. 
+ Works well in the case the matrix $A + \lambda \operatorname{I}$ is diagonal + dominant. + For more information, see the documentation of + [Conjugate Gradient][conjugate-gradient] + Args: + num_samples_estimator: number of samples to use in computation of + Hutchinson's estimator + """ + + _diag: torch.Tensor + _reg: float + + def __init__(self, num_samples_estimator: int = 1): + self.num_samples_estimator = num_samples_estimator + + @property + def is_fitted(self): + return self._diag is not None and self._reg is not None + + def fit( + self, + mat_mat_prod: Callable[[torch.Tensor], torch.Tensor], + size: int, + dtype: torch.dtype, + device: torch.device, + regularization: float = 0.0, + ): + r""" + Fits by computing an estimate of the diagonal of the matrix represented by + `mat_mat_prod` via Hutchinson's estimator + + Args: + mat_mat_prod: a callable representing the matrix-matrix product + size: size of the square matrix + dtype: needed data type of inputs for the mat_mat_prod + device: needed device for inputs of mat_mat_prod + regularization: regularization parameter + $\lambda$ in $(A+\lambda I)x=b$ + """ + random_samples = torch.randn( + size, self.num_samples_estimator, device=device, dtype=dtype + ) + diagonal_estimate = torch.sum( + torch.mul(random_samples, mat_mat_prod(random_samples)), dim=1 + ) + diagonal_estimate /= self.num_samples_estimator + self._diag = diagonal_estimate + self._reg = regularization + + def _solve(self, rhs: torch.Tensor): + inv_diag = 1.0 / (self._diag + self._reg) + + if rhs.ndim == 1: + return rhs * inv_diag + + return rhs * inv_diag.unsqueeze(-1) + + +class NystroemPreConditioner(PreConditioner): + r""" + Pre-conditioner for improving the convergence of CG for systems of the form + + $$ (A + \lambda \operatorname{I})x = \operatorname{rhs} $$ + + The NystroemPreConditioner computes a low-rank approximation + + $$ A_{\text{nys}} = (A \Omega)(\Omega^T A \Omega)^{\dagger}(A \Omega)^T + = U \Sigma U^T, $$ + + where 
$(\cdot)^{\dagger}$ denotes the [Moore-Penrose inverse]( + https://en.wikipedia.org/wiki/Moore%E2%80%93Penrose_inverse), + and uses the matrix + + $$ M^{-1} = (\lambda + \sigma_{\text{rank}})U(\Sigma+ + \lambda \operatorname{I})^{-1}U^T+(\operatorname{I} - UU^T) $$ + + for pre-conditioning, where \( \sigma_{\text{rank}} \) is the smallest + eigenvalue of the low-rank approximation. + """ + + _low_rank_approx: LowRankProductRepresentation + _regularization: float + + def __init__(self, rank: int): + self._rank = rank + + @property + def low_rank_approx(self) -> Optional[LowRankProductRepresentation]: + return self._low_rank_approx + + @property + def rank(self): + return self._rank + + @property + def is_fitted(self): + return self._low_rank_approx is not None and self._regularization is not None + + def fit( + self, + mat_mat_prod: Callable[[torch.Tensor], torch.Tensor], + size: int, + dtype: torch.dtype, + device: torch.device, + regularization: float = 0.0, + ): + r""" + Fits by computing a low-rank approximation of the matrix represented by + `mat_mat_prod` via Nystroem approximation + + Args: + mat_mat_prod: a callable representing the matrix-matrix product + size: size of the square matrix + dtype: needed data type of inputs for the mat_mat_prod + device: needed device for inputs of mat_mat_prod + regularization: regularization parameter + $\lambda$ in $(A+\lambda I)x=b$ + """ + + self._low_rank_approx = randomized_nystroem_approximation( + mat_mat_prod, size, self._rank, dtype, mat_vec_device=device + ) + self._regularization = regularization + + def _solve(self, rhs: torch.Tensor): + + rhs_is_one_dim = rhs.ndim == 1 + + rhs_view = torch.atleast_2d(rhs).t() if rhs_is_one_dim else rhs + + regularized_eigenvalues = ( + self._low_rank_approx.eigen_vals + self._regularization + ) + lambda_rank = self._low_rank_approx.eigen_vals[-1] + self._regularization + + proj_rhs = self._low_rank_approx.projections.t() @ rhs_view + + inverse_regularized_eigenvalues = 
lambda_rank / regularized_eigenvalues + + result = self._low_rank_approx.projections @ ( + proj_rhs * inverse_regularized_eigenvalues.unsqueeze(-1) + ) + + result += rhs_view - self._low_rank_approx.projections @ proj_rhs + + if rhs_is_one_dim: + result = result.squeeze() + + return result diff --git a/src/pydvl/influence/torch/util.py b/src/pydvl/influence/torch/util.py index 394cf535a..079acf9c9 100644 --- a/src/pydvl/influence/torch/util.py +++ b/src/pydvl/influence/torch/util.py @@ -11,6 +11,7 @@ Mapping, Optional, Tuple, + Type, Union, ) @@ -135,12 +136,10 @@ def align_structure( tangent_dict: Dict[str, torch.Tensor] if isinstance(target, dict): - if list(target.keys()) != list(source.keys()): raise ValueError("The keys in 'target' do not match the keys in 'source'.") if [v.shape for v in target.values()] != [v.shape for v in source.values()]: - raise ValueError( "The shapes of the values in 'target' do not match the shapes " "of the values in 'source'." @@ -149,9 +148,7 @@ def align_structure( tangent_dict = target elif isinstance(target, tuple) or isinstance(target, list): - if [v.shape for v in target] != [v.shape for v in source.values()]: - raise ValueError( "'target' is a tuple/list but its elements' shapes do not match " "the shapes of the values in 'source'." 
@@ -160,7 +157,6 @@ def align_structure( tangent_dict = dict(zip(source.keys(), target)) elif isinstance(target, torch.Tensor): - try: tangent_dict = dict( zip( @@ -249,7 +245,7 @@ def torch_dataset_to_dask_array( dataset: Dataset, chunk_size: int, total_size: Optional[int] = None, - resulting_dtype=np.float32, + resulting_dtype: Type[np.number] = np.float32, ) -> Tuple[da.Array, ...]: """ Construct tuple of dask arrays from a PyTorch dataset, using dask.delayed @@ -350,7 +346,6 @@ def _get_chunk( for chunk, (start, stop) in zip(delayed_chunks, chunk_indices): for tensor_idx, sample_tensor in enumerate(sample): - delayed_tensor = da.from_delayed( dask.delayed(lambda t: t.cpu().numpy())(chunk[tensor_idx]), shape=(stop - start, *sample_tensor.shape), diff --git a/src/pydvl/parallel/backends/joblib.py b/src/pydvl/parallel/backends/joblib.py index 48b1dcd77..264fc2a0b 100644 --- a/src/pydvl/parallel/backends/joblib.py +++ b/src/pydvl/parallel/backends/joblib.py @@ -25,6 +25,22 @@ class JoblibParallelBackend(BaseParallelBackend, backend_name="joblib"): It shouldn't be initialized directly. You should instead call [init_parallel_backend()][pydvl.parallel.backend.init_parallel_backend]. + ??? Example + ``` python + from pydvl.parallel import init_paralle_backend, ParallelConfig + config = ParallelConfig(backend="joblib") + parallel_backend = init_parallel_backend(config) + ``` + + ??? Example + ``` python + import joblib + from pydvl.parallel import init_paralle_backend, ParallelConfig + with joblib.parallel_config(verbose=100): + config = ParallelConfig(backend="joblib") + parallel_backend = init_parallel_backend(config) + ``` + Args: config: instance of [ParallelConfig][pydvl.utils.config.ParallelConfig] with cluster address, number of cpus, etc. 
@@ -32,7 +48,6 @@ class JoblibParallelBackend(BaseParallelBackend, backend_name="joblib"): def __init__(self, config: ParallelConfig): self.config = { - "logging_level": config.logging_level, "n_jobs": config.n_cpus_local, } @@ -70,9 +85,8 @@ def wait(self, v: list[T], *args, **kwargs) -> tuple[list[T], list[T]]: return v, [] def _effective_n_jobs(self, n_jobs: int) -> int: - if self.config["n_jobs"] is None: - maximum_n_jobs = joblib.effective_n_jobs() - else: + eff_n_jobs: int = joblib.effective_n_jobs(n_jobs) + if self.config["n_jobs"] is not None: maximum_n_jobs = self.config["n_jobs"] - eff_n_jobs: int = min(joblib.effective_n_jobs(n_jobs), maximum_n_jobs) + eff_n_jobs = min(eff_n_jobs, maximum_n_jobs) return eff_n_jobs diff --git a/src/pydvl/parallel/backends/ray.py b/src/pydvl/parallel/backends/ray.py index abb33ebe3..3f9cc3f50 100644 --- a/src/pydvl/parallel/backends/ray.py +++ b/src/pydvl/parallel/backends/ray.py @@ -23,20 +23,27 @@ class RayParallelBackend(BaseParallelBackend, backend_name="ray"): It shouldn't be initialized directly. You should instead call [init_parallel_backend()][pydvl.parallel.backend.init_parallel_backend]. + ??? Example + ``` python + import ray + from pydvl.parallel import init_parallel_backend, ParallelConfig + ray.init() + config = ParallelConfig(backend="ray") + parallel_backend = init_parallel_backend(config) + ``` + Args: config: instance of [ParallelConfig][pydvl.utils.config.ParallelConfig] with cluster address, number of cpus, etc. """ def __init__(self, config: ParallelConfig): - self.config = { - "address": config.address, - "logging_level": config.logging_level or logging.WARNING, - } - if self.config["address"] is None: - self.config["num_cpus"] = config.n_cpus_local if not ray.is_initialized(): - ray.init(**self.config) + raise RuntimeError( + "Starting from v0.9.0, ray is no longer automatically initialized. " + "Please use `ray.init()` with the desired configuration " + "before using this class." 
+ ) # Register ray joblib backend register_ray() diff --git a/src/pydvl/parallel/config.py b/src/pydvl/parallel/config.py index 46d2d1ee9..f63921f89 100644 --- a/src/pydvl/parallel/config.py +++ b/src/pydvl/parallel/config.py @@ -1,21 +1,24 @@ -import logging +import warnings from dataclasses import dataclass from typing import Literal, Optional, Tuple, Union __all__ = ["ParallelConfig"] +# TODO: delete this class once it's made redundant in v0.10.0 +# This string for the benefit of deprecation searches: +# remove_in="0.10.0" @dataclass(frozen=True) class ParallelConfig: """Configuration for parallel computation backend. Args: backend: Type of backend to use. Defaults to 'joblib' - address: Address of existing remote or local cluster to use. - n_cpus_local: Number of CPUs to use when creating a local ray cluster. + address: (DEPRECATED) Address of existing remote or local cluster to use. + n_cpus_local: (DEPRECATED) Number of CPUs to use when creating a local ray cluster. This has no effect when using an existing ray cluster. - logging_level: Logging level for the parallel backend's worker. - wait_timeout: Timeout in seconds for waiting on futures. + logging_level: (DEPRECATED) Logging level for the parallel backend's worker. + wait_timeout: (DEPRECATED) Timeout in seconds for waiting on futures. 
""" backend: Literal["joblib", "ray"] = "joblib" @@ -25,6 +28,30 @@ class ParallelConfig: wait_timeout: float = 1.0 def __post_init__(self) -> None: + warnings.warn( + "The `ParallelConfig` class was deprecated in v0.9.0 and will be removed in v0.10.0", + FutureWarning, + ) + if self.address is not None: + warnings.warn( + "`address` is deprecated in v0.9.0 and will be removed in v0.10.0", + FutureWarning, + ) + if self.n_cpus_local is not None: + warnings.warn( + "`n_cpus_local` is deprecated in v0.9.0 and will be removed in v0.10.0", + FutureWarning, + ) + if self.logging_level is not None: + warnings.warn( + "`logging_level` is deprecated in v0.9.0 and will be removed in v0.10.0", + FutureWarning, + ) + if self.wait_timeout != 1.0: + warnings.warn( + "`wait_timeout` is deprecated in v0.9.0 and will be removed in v0.10.0", + FutureWarning, + ) # FIXME: this is specific to ray if self.address is not None and self.n_cpus_local is not None: raise ValueError("When `address` is set, `n_cpus_local` should be None.") diff --git a/src/pydvl/parallel/futures/__init__.py b/src/pydvl/parallel/futures/__init__.py index c42026ecf..ce42ecc91 100644 --- a/src/pydvl/parallel/futures/__init__.py +++ b/src/pydvl/parallel/futures/__init__.py @@ -7,7 +7,7 @@ try: from pydvl.parallel.futures.ray import RayExecutor -except ImportError: +except ModuleNotFoundError: pass __all__ = ["init_executor"] @@ -30,7 +30,7 @@ def init_executor( ??? 
Examples ``` python - from pydvl.parallel import init_executor, ParallelConfig + from pydvl.parallel.futures import init_executor, ParallelConfig config = ParallelConfig(backend="ray") with init_executor(max_workers=1, config=config) as executor: diff --git a/src/pydvl/parallel/futures/ray.py b/src/pydvl/parallel/futures/ray.py index 1a9658744..0aa97f152 100644 --- a/src/pydvl/parallel/futures/ray.py +++ b/src/pydvl/parallel/futures/ray.py @@ -8,15 +8,20 @@ from typing import Any, Callable, Optional, TypeVar from weakref import WeakSet, ref -import ray -from deprecate import deprecated +try: + import ray +except ModuleNotFoundError as e: + raise ModuleNotFoundError( + f"Cannot use RayExecutor because ray was not installed. " + f"Make sure to install pyDVL using `pip install pyDVL[ray]`. \n" + f"Original error: {e}" + ) +from pydvl.parallel import CancellationPolicy from pydvl.parallel.config import ParallelConfig __all__ = ["RayExecutor"] -from pydvl.parallel import CancellationPolicy - T = TypeVar("T") logger = logging.getLogger(__name__) @@ -44,12 +49,6 @@ class RayExecutor(Executor): any. See [CancellationPolicy][pydvl.parallel.backend.CancellationPolicy] """ - @deprecated( - target=True, - deprecated_in="0.7.0", - remove_in="0.8.0", - args_mapping={"cancel_futures_on_exit": "cancel_futures"}, - ) def __init__( self, max_workers: Optional[int] = None, @@ -75,12 +74,12 @@ def __init__( else CancellationPolicy.NONE ) - self.config = {"address": config.address, "logging_level": config.logging_level} - if config.address is None: - self.config["num_cpus"] = config.n_cpus_local - if not ray.is_initialized(): - ray.init(**self.config) + raise RuntimeError( + "Starting from v0.9.0, ray is no longer automatically initialized. " + "Please use `ray.init()` with the desired configuration " + "before using this class." 
+ ) self._max_workers = max_workers if self._max_workers is None: diff --git a/src/pydvl/parallel/map_reduce.py b/src/pydvl/parallel/map_reduce.py index 401df3fca..a4cfd272a 100644 --- a/src/pydvl/parallel/map_reduce.py +++ b/src/pydvl/parallel/map_reduce.py @@ -124,17 +124,8 @@ def __call__( Returns: The result of the reduce function. """ - parallel_kwargs: Dict[str, Any] = {"n_jobs": self.n_jobs} - if self.config.backend == "joblib": - parallel_kwargs["backend"] = "loky" - else: - parallel_kwargs["backend"] = self.config.backend - # In joblib the levels are reversed. - # 0 means no logging and 50 means log everything to stdout - if self.config.logging_level is not None: - parallel_kwargs["verbose"] = 50 - self.config.logging_level seed_seq = ensure_seed_sequence(seed) - with Parallel(**parallel_kwargs) as parallel: + with Parallel() as parallel: chunks = self._chunkify(self.inputs_, n_chunks=self.n_jobs) map_results: List[R] = parallel( delayed(self._map_func)( diff --git a/src/pydvl/utils/__init__.py b/src/pydvl/utils/__init__.py index 245c596dd..edc27c579 100644 --- a/src/pydvl/utils/__init__.py +++ b/src/pydvl/utils/__init__.py @@ -1,4 +1,3 @@ -from ..parallel import * from .caching import * from .config import * from .dataset import * diff --git a/src/pydvl/utils/caching/__init__.py b/src/pydvl/utils/caching/__init__.py index 1089628bc..dcf3118db 100644 --- a/src/pydvl/utils/caching/__init__.py +++ b/src/pydvl/utils/caching/__init__.py @@ -1,4 +1,4 @@ -"""Caching of functions. +"""This module provides caching of functions. PyDVL can cache (memoize) the computation of the utility function and speed up some computations for data valuation. @@ -12,7 +12,7 @@ # Configuration Caching is disabled by default but can be enabled easily, -see [Setting up the cache](#setting-up-the-cache). +see [Setting up the cache][getting-started-cache]. 
When enabled, it will be added to any callable used to construct a [Utility][pydvl.utils.utility.Utility] (done with the wrap method of [CacheBackend][pydvl.utils.caching.base.CacheBackend]). @@ -38,8 +38,10 @@ and read from a Memcached server. This is used to share cached values between processes across multiple machines. - **Note** This specific backend requires optional dependencies. - See [[installation#extras]] for more information) + !!! Info + This specific backend requires optional dependencies not installed by + default. See [Extra dependencies][installation-extras] for more + information. # Usage with stochastic functions @@ -50,7 +52,7 @@ the speed benefits of memoization. This behaviour can be activated with the option -[allow_repeated_evaluations][pydvl.utils.caching.config.CachedFuncConfig].. +[allow_repeated_evaluations][pydvl.utils.caching.config.CachedFuncConfig]. # Cache reuse @@ -84,5 +86,9 @@ from .base import * from .config import * from .disk import * -from .memcached import * from .memory import * + +try: + from .memcached import * +except ModuleNotFoundError: + pass diff --git a/src/pydvl/utils/caching/disk.py b/src/pydvl/utils/caching/disk.py index 06250a450..c291bbb80 100644 --- a/src/pydvl/utils/caching/disk.py +++ b/src/pydvl/utils/caching/disk.py @@ -26,8 +26,9 @@ class DiskCacheBackend(CacheBackend): Attributes: cache_dir: Base directory for cache storage. - ??? 
Examples - ``` pycon + Example: + Basic usage: + ```pycon >>> from pydvl.utils.caching.disk import DiskCacheBackend >>> cache_backend = DiskCacheBackend() >>> cache_backend.clear() @@ -37,7 +38,8 @@ class DiskCacheBackend(CacheBackend): 42 ``` - ``` pycon + Callable wrapping: + ```pycon >>> from pydvl.utils.caching.disk import DiskCacheBackend >>> cache_backend = DiskCacheBackend() >>> cache_backend.clear() diff --git a/src/pydvl/utils/caching/memcached.py b/src/pydvl/utils/caching/memcached.py index 63855682f..0eb348f68 100644 --- a/src/pydvl/utils/caching/memcached.py +++ b/src/pydvl/utils/caching/memcached.py @@ -9,10 +9,13 @@ from pymemcache import MemcacheUnexpectedCloseError from pymemcache.client import Client, RetryingClient from pymemcache.serde import PickleSerde +except ModuleNotFoundError as e: + raise ModuleNotFoundError( + f"Cannot use MemcachedCacheBackend because pymemcache was not installed. " + f"Make sure to install pyDVL using `pip install pyDVL[memcached]`. \n" + f"Original error: {e}" + ) - PYMEMCACHE_INSTALLED = True -except ImportError: - PYMEMCACHE_INSTALLED = False from .base import CacheBackend @@ -50,10 +53,10 @@ class MemcachedClientConfig: class MemcachedCacheBackend(CacheBackend): """Memcached cache backend for the distributed caching of functions. - Implements the CacheBackend interface for a memcached based cache. - This allows sharing evaluations across processes and nodes in a cluster. - You can run memcached as a service, locally or remotely, - see [Setting up the cache](#setting-up-the-cache) + Implements the [CacheBackend][pydvl.utils.caching.base.CacheBackend] + interface for a memcached based cache. This allows sharing evaluations + across processes and nodes in a cluster. You can run memcached as a service, + locally or remotely, see [the caching documentation][getting-started-cache]. Args: config: Memcached client configuration. 
@@ -62,8 +65,9 @@ class MemcachedCacheBackend(CacheBackend): config: Memcached client configuration. client: Memcached client instance. - ??? Examples - ``` pycon + Example: + Basic usage: + ```pycon >>> from pydvl.utils.caching.memcached import MemcachedCacheBackend >>> cache_backend = MemcachedCacheBackend() >>> cache_backend.clear() @@ -73,7 +77,8 @@ class MemcachedCacheBackend(CacheBackend): 42 ``` - ``` pycon + Callable wrapping: + ```pycon >>> from pydvl.utils.caching.memcached import MemcachedCacheBackend >>> cache_backend = MemcachedCacheBackend() >>> cache_backend.clear() @@ -103,11 +108,7 @@ def __init__(self, config: MemcachedClientConfig = MemcachedClientConfig()) -> N Args: config: Memcached client configuration. """ - if not PYMEMCACHE_INSTALLED: - raise ModuleNotFoundError( - "Cannot use MemcachedCacheBackend because pymemcache was not installed. " - "Make sure to install pyDVL using `pip install pyDVL[memcached]`" - ) + super().__init__() self.config = config self.client = self._connect(self.config) diff --git a/src/pydvl/utils/caching/memory.py b/src/pydvl/utils/caching/memory.py index 270d3ce1a..b7db38dee 100644 --- a/src/pydvl/utils/caching/memory.py +++ b/src/pydvl/utils/caching/memory.py @@ -18,8 +18,9 @@ class InMemoryCacheBackend(CacheBackend): Attributes: cached_values: Dictionary used to store cached values. - ??? 
Examples - ``` pycon + Example: + Basic usage: + ```pycon >>> from pydvl.utils.caching.memory import InMemoryCacheBackend >>> cache_backend = InMemoryCacheBackend() >>> cache_backend.clear() @@ -29,7 +30,8 @@ class InMemoryCacheBackend(CacheBackend): 42 ``` - ``` pycon + Callable wrapping: + ```pycon >>> from pydvl.utils.caching.memory import InMemoryCacheBackend >>> cache_backend = InMemoryCacheBackend() >>> cache_backend.clear() diff --git a/src/pydvl/utils/parallel.py b/src/pydvl/utils/parallel.py deleted file mode 100644 index 2df52605f..000000000 --- a/src/pydvl/utils/parallel.py +++ /dev/null @@ -1,24 +0,0 @@ -""" -# This module is deprecated - -!!! warning "Redirects" - Imports from this module will be redirected to - [pydvl.parallel][pydvl.parallel] only until v0.9.0. Please update your - imports. -""" -import logging - -from ..parallel.backend import * -from ..parallel.config import * -from ..parallel.futures import * -from ..parallel.map_reduce import * - -log = logging.getLogger(__name__) - -# This string for the benefit of deprecation searches: -# remove_in="0.9.0" -log.warning( - "Importing parallel tools from pydvl.utils is deprecated. " - "Please import directly from pydvl.parallel. " - "Redirected imports will be removed in v0.9.0" -) diff --git a/src/pydvl/utils/types.py b/src/pydvl/utils/types.py index 1a915c33c..18a22bd26 100644 --- a/src/pydvl/utils/types.py +++ b/src/pydvl/utils/types.py @@ -23,7 +23,7 @@ ] IndexT = TypeVar("IndexT", bound=np.int_) -NameT = TypeVar("NameT", bound=np.object_) +NameT = TypeVar("NameT", np.object_, np.int_) R = TypeVar("R", covariant=True) Seed = Union[int, Generator] diff --git a/src/pydvl/utils/utility.py b/src/pydvl/utils/utility.py index b975c0ff2..b1426afc2 100644 --- a/src/pydvl/utils/utility.py +++ b/src/pydvl/utils/utility.py @@ -7,7 +7,7 @@ data and scoring function (the latter being what one usually understands under *utility* in the general definition of Shapley value). 
It is automatically cached across machines when the -[cache is configured][setting-up-the-cache] and it is enabled upon construction. +[cache is configured][getting-started-cache] and it is enabled upon construction. [DataUtilityLearning][pydvl.utils.utility.DataUtilityLearning] adds support for learning the scoring function to avoid repeated re-training @@ -38,7 +38,7 @@ from pydvl.utils.score import Scorer from pydvl.utils.types import SupervisedModel -__all__ = ["Utility", "DataUtilityLearning", "MinerGameUtility", "GlovesGameUtility"] +__all__ = ["Utility", "DataUtilityLearning"] logger = logging.getLogger(__name__) @@ -65,8 +65,8 @@ class Utility: Since evaluating the scoring function requires retraining the model and that can be time-consuming, this class wraps it and caches the results of each execution. Caching is available both locally and across nodes, but must - always be enabled for your project first, see [Setting up the - cache][setting-up-the-cache]. + always be enabled for your project first, see [the documentation][getting-started-cache] + and the [module documentation][pydvl.utils.caching]. Attributes: model: The supervised model. @@ -356,120 +356,3 @@ def __call__(self, indices: Iterable[int]) -> float: def data(self) -> Dataset: """Returns the wrapped utility's [Dataset][pydvl.utils.dataset.Dataset].""" return self.utility.data - - -class MinerGameUtility(Utility): - r"""Toy game utility that is used for testing and demonstration purposes. - - Consider a group of n miners, who have discovered large bars of gold. - - If two miners can carry one piece of gold, then the payoff of a - coalition $S$ is: - - $${ - v(S) = \left\{\begin{array}{lll} - \mid S \mid / 2 & \text{, if} & \mid S \mid \text{ is even} \\ - ( \mid S \mid - 1)/2 & \text{, if} & \mid S \mid \text{ is odd} - \end{array}\right. - }$$ - - If there are more than two miners and there is an even number of miners, - then the core consists of the single payoff where each miner gets 1/2. 
- - If there is an odd number of miners, then the core is empty. - - Taken from [Wikipedia](https://en.wikipedia.org/wiki/Core_(game_theory)) - - Args: - n_miners: Number of miners that participate in the game. - """ - - def __init__(self, n_miners: int, **kwargs): - if n_miners <= 2: - raise ValueError(f"n_miners, {n_miners} should be > 2") - self.n_miners = n_miners - - x = np.arange(n_miners)[..., np.newaxis] - # The y values don't matter here - y = np.zeros_like(x) - - self.data = Dataset(x_train=x, y_train=y, x_test=x, y_test=y) - - def __call__(self, indices: Iterable[int]) -> float: - n = len(tuple(indices)) - if n % 2 == 0: - return n / 2 - else: - return (n - 1) / 2 - - def _initialize_utility_wrapper(self): - pass - - def exact_least_core_values(self) -> Tuple[NDArray[np.float_], float]: - if self.n_miners % 2 == 0: - values = np.array([0.5] * self.n_miners) - subsidy = 0.0 - else: - values = np.array( - [(self.n_miners - 1) / (2 * self.n_miners)] * self.n_miners - ) - subsidy = (self.n_miners - 1) / (2 * self.n_miners) - return values, subsidy - - def __repr__(self) -> str: - return f"{self.__class__.__name__}(n={self.n_miners})" - - -class GlovesGameUtility(Utility): - r"""Toy game utility that is used for testing and demonstration purposes. - - In this game, some players have a left glove and others a right glove. - Single gloves have a worth of zero while pairs have a worth of 1. - - The payoff of a coalition $S$ is: - - $${ - v(S) = \min( \mid S \cap L \mid, \mid S \cap R \mid ) - }$$ - - Where $L$, respectively $R$, is the set of players with left gloves, - respectively right gloves. - - Args: - left: Number of players with a left glove. - right: Number of player with a right glove. 
- - """ - - def __init__(self, left: int, right: int, **kwargs): - self.left = left - self.right = right - - x = np.empty(left + right)[..., np.newaxis] - # The y values don't matter here - y = np.zeros_like(x) - - self.data = Dataset(x_train=x, y_train=y, x_test=x, y_test=y) - - def __call__(self, indices: Iterable[int]) -> float: - left_sum = float(np.sum(np.asarray(indices) < self.left)) - right_sum = float(np.sum(np.asarray(indices) >= self.left)) - return min(left_sum, right_sum) - - def _initialize_utility_wrapper(self): - pass - - def exact_least_core_values(self) -> Tuple[NDArray[np.float_], float]: - if self.left == self.right: - subsidy = -0.5 - values = np.array([0.5] * (self.left + self.right)) - elif self.left < self.right: - subsidy = 0.0 - values = np.array([1.0] * self.left + [0.0] * self.right) - else: - subsidy = 0.0 - values = np.array([0.0] * self.left + [1.0] * self.right) - return values, subsidy - - def __repr__(self) -> str: - return f"{self.__class__.__name__}(L={self.left}, R={self.right})" diff --git a/src/pydvl/value/__init__.py b/src/pydvl/value/__init__.py index 6cfcc2160..f3a647698 100644 --- a/src/pydvl/value/__init__.py +++ b/src/pydvl/value/__init__.py @@ -2,7 +2,7 @@ This module implements algorithms for the exact and approximate computation of values and semi-values. -See [Data valuation][computing-data-values] for an introduction to the concepts +See [Data valuation][data-valuation] for an introduction to the concepts and methods implemented here. """ diff --git a/src/pydvl/value/games.py b/src/pydvl/value/games.py new file mode 100644 index 000000000..b16d32f5a --- /dev/null +++ b/src/pydvl/value/games.py @@ -0,0 +1,638 @@ +""" +This module provides several predefined games and, depending on the game, +the corresponding Shapley values, Least Core values or both of them, for +benchmarking purposes. + +## References + +[^1]: Castro, J., Gómez, D. and Tejada, + J., 2009. 
[Polynomial calculation of the Shapley value based on + sampling](http://www.sciencedirect.com/science/article/pii/S0305054808000804). + Computers & Operations Research, 36(5), pp.1726-1730. + +""" +from __future__ import annotations + +from abc import ABC, abstractmethod +from functools import lru_cache +from typing import Iterable, Optional, Tuple + +import numpy as np +import scipy as sp +from numpy.typing import NDArray + +from pydvl.utils import Scorer, Status +from pydvl.utils.dataset import Dataset +from pydvl.utils.types import SupervisedModel +from pydvl.utils.utility import Utility +from pydvl.value import ValuationResult + +__all__ = [ + "Game", + "SymmetricVotingGame", + "AsymmetricVotingGame", + "ShoesGame", + "AirportGame", + "MinimumSpanningTreeGame", + "MinerGame", +] + + +class DummyGameDataset(Dataset): + """Dummy game dataset. + + Initializes a dummy game dataset with n_players and an optional + description. + + This class is used internally inside the [Game][pydvl.value.games.Game] + class. + + Args: + n_players: Number of players that participate in the game. + description: Optional description of the dataset. + """ + + def __init__(self, n_players: int, description: Optional[str] = None) -> None: + x = np.arange(0, n_players, 1).reshape(-1, 1) + nil = np.zeros_like(x) + super().__init__( + x, + nil.copy(), + nil.copy(), + nil.copy(), + feature_names=["x"], + target_names=["y"], + description=description, + ) + + def get_test_data( + self, indices: Optional[Iterable[int]] = None + ) -> Tuple[NDArray, NDArray]: + """Returns the subsets of the train set instead of the test set. + + Args: + indices: Indices into the training data. + + Returns: + Subset of the train data. + """ + if indices is None: + return self.x_train, self.y_train + x = self.x_train[indices] + y = self.y_train[indices] + return x, y + + +class DummyModel(SupervisedModel): + """Dummy model class. + + A dummy supervised model used for testing purposes only. 
+ """ + + def __init__(self) -> None: + pass + + def fit(self, x: NDArray, y: NDArray) -> None: + pass + + def predict(self, x: NDArray) -> NDArray: # type: ignore + pass + + def score(self, x: NDArray, y: NDArray) -> float: + # Dummy, will be overriden + return 0 + + +class Game(ABC): + """Base class for games + + Any Game subclass has to implement the abstract `_score` method + to assign a score to each coalition/subset and at least + one of `shapley_values`, `least_core_values`. + + Args: + n_players: Number of players that participate in the game. + score_range: Minimum and maximum values of the `_score` method. + description: Optional string description of the dummy dataset that will be created. + + Attributes: + n_players: Number of players that participate in the game. + data: Dummy dataset object. + u: Utility object with a dummy model and dataset. + """ + + def __init__( + self, + n_players: int, + score_range: Tuple[float, float] = (-np.inf, np.inf), + description: Optional[str] = None, + ): + self.n_players = n_players + self.data = DummyGameDataset(self.n_players, description) + self.u = Utility( + DummyModel(), + self.data, + scorer=Scorer(self._score, range=score_range), + catch_errors=False, + show_warnings=True, + ) + + def shapley_values(self) -> ValuationResult: + raise NotImplementedError( + f"shapley_values method was not implemented for class {self.__class__.__name__}" + ) + + def least_core_values(self) -> ValuationResult: + raise NotImplementedError( + f"least_core_values method was not implemented for class {self.__class__.__name__}" + ) + + @abstractmethod + def _score(self, model: SupervisedModel, X: NDArray, y: NDArray) -> float: + ... + + def __repr__(self) -> str: + return f"{self.__class__.__name__}(n_players={self.n_players})" + + +class SymmetricVotingGame(Game): + r"""Toy game that is used for testing and demonstration purposes. 
+ + A symmetric voting game defined in + (Castro et al., 2009)1 + Section 4.1 + + For this game the utility of a coalition is 1 if its cardinality is + greater than num_samples/2, or 0 otherwise. + + $${ + v(S) = \left\{\begin{array}{ll} + 1, & \text{ if} \quad \mid S \mid > \frac{N}{2} \\ + 0, & \text{ otherwise} + \end{array}\right. + }$$ + + Args: + n_players: Number of players that participate in the game. + """ + + def __init__(self, n_players: int) -> None: + if n_players % 2 != 0: + raise ValueError("n_players must be an even number.") + description = "Dummy data for the symmetric voting game in Castro et al. 2009" + super().__init__( + n_players, + score_range=(0, 1), + description=description, + ) + + def _score(self, model: SupervisedModel, X: NDArray, y: NDArray) -> float: + return 1 if len(X) > len(self.data) // 2 else 0 + + @lru_cache + def shapley_values(self) -> ValuationResult: + exact_values = np.ones(self.n_players) / self.n_players + result: ValuationResult[np.int_, np.int_] = ValuationResult( + algorithm="exact_shapley", + status=Status.Converged, + indices=self.data.indices, + values=exact_values, + variances=np.zeros_like(self.data.x_train), + counts=np.zeros_like(self.data.x_train), + ) + return result + + +class AsymmetricVotingGame(Game): + r"""Toy game that is used for testing and demonstration purposes. + + An asymmetric voting game defined in + (Castro et al., 2009)1 + Section 4.2. + + For this game the player set is $N = \{1,\dots,51\}$ and + the utility of a coalition is given by: + + $${ + v(S) = \left\{\begin{array}{ll} + 1, & \text{ if} \quad \sum\limits_{i \in S} w_i > \sum\limits_{j \in N}\frac{w_j}{2} \\ + 0, & \text{ otherwise} + \end{array}\right. + }$$ + + where $w = [w_1,\dots, w_{51}]$ is a list of weights associated with each player. + + Args: + n_players: Number of players that participate in the game. 
+ """ + + def __init__(self, n_players: int = 51) -> None: + if n_players != 51: + raise ValueError( + f"{self.__class__.__name__} only supports n_players=51 but got {n_players=}." + ) + description = "Dummy data for the asymmetric voting game in Castro et al. 2009" + super().__init__( + n_players, + score_range=(0, 1), + description=description, + ) + + ranges = [ + range(0, 1), + range(1, 2), + range(2, 3), + range(3, 5), + range(5, 6), + range(6, 7), + range(7, 9), + range(9, 10), + range(10, 12), + range(12, 15), + range(15, 16), + range(16, 20), + range(20, 24), + range(24, 26), + range(26, 30), + range(30, 34), + range(34, 35), + range(35, 44), + range(44, 51), + ] + + ranges_weights = [ + 45, + 41, + 27, + 26, + 25, + 21, + 17, + 14, + 13, + 12, + 11, + 10, + 9, + 8, + 7, + 6, + 5, + 4, + 3, + ] + ranges_values = [ + "0.08831", + "0.07973", + "0.05096", + "0.04898", + "0.047", + "0.03917", + "0.03147", + "0.02577", + "0.02388", + "0.022", + "0.02013", + "0.01827", + "0.01641", + "0.01456", + "0.01272", + "0.01088", + "0.009053", + "0.00723", + "0.005412", + ] + + self.weight_table = np.zeros(self.n_players) + exact_values = np.zeros(self.n_players) + for r, w, v in zip(ranges, ranges_weights, ranges_values): + self.weight_table[r] = w + exact_values[r] = v + + self.exact_values = exact_values + self.threshold = np.sum(self.weight_table) / 2 + + def _score(self, model: SupervisedModel, X: NDArray, y: NDArray) -> float: + return 1 if np.sum(self.weight_table[X]) > self.threshold else 0 + + @lru_cache + def shapley_values(self) -> ValuationResult: + result: ValuationResult[np.int_, np.int_] = ValuationResult( + algorithm="exact_shapley", + status=Status.Converged, + indices=self.data.indices, + values=self.exact_values, + variances=np.zeros_like(self.data.x_train), + counts=np.zeros_like(self.data.x_train), + ) + return result + + +class ShoesGame(Game): + r"""Toy game that is used for testing and demonstration purposes. 
+ + A shoes game defined in (Castro et al., + 2009)1. + + In this game, some players have a left shoe and others a right shoe. + Single shoes have a worth of zero while pairs have a worth of 1. + + The payoff of a coalition $S$ is: + + $${ + v(S) = \min( \mid S \cap L \mid, \mid S \cap R \mid ) + }$$ + + Where $L$, respectively $R$, is the set of players with left shoes, + respectively right shoes. + + Args: + left: Number of players with a left shoe. + right: Number of players with a right shoe. + """ + + def __init__(self, left: int, right: int) -> None: + self.left = left + self.right = right + n_players = self.left + self.right + description = "Dummy data for the shoe game in Castro et al. 2009" + max_score = n_players // 2 + super().__init__(n_players, score_range=(0, max_score), description=description) + + def _score(self, model: SupervisedModel, X: NDArray, y: NDArray) -> float: + left_sum = float(np.sum(np.asarray(X) < self.left)) + right_sum = float(np.sum(np.asarray(X) >= self.left)) + return min(left_sum, right_sum) + + @lru_cache + def shapley_values(self) -> ValuationResult: + if self.left != self.right and (self.left > 4 or self.right > 4): + raise ValueError( + "This class only supports getting exact shapley values " + "for left <= 4 and right <= 4 or left == right" + ) + precomputed_values = np.array( + [ + [0.0, 0.0, 0.0, 0.0, 0.0], + [0.0, 0.5, 0.667, 0.75, 0.8], + [0.0, 0.167, 0.5, 0.65, 0.733], + [0.0, 0.083, 0.233, 0.5, 0.638], + [0.0, 0.050, 0.133, 0.271, 0.5], + ] + ) + if self.left == self.right: + value_left = value_right = min(self.left, self.right) / ( + self.left + self.right + ) + else: + value_left = precomputed_values[self.left, self.right] + value_right = precomputed_values[self.right, self.left] + exact_values = np.array([value_left] * self.left + [value_right] * self.right) + result: ValuationResult[np.int_, np.int_] = ValuationResult( + algorithm="exact_shapley", + status=Status.Converged, + indices=self.data.indices, + 
values=exact_values, + variances=np.zeros_like(self.data.x_train), + counts=np.zeros_like(self.data.x_train), + ) + return result + + @lru_cache + def least_core_values(self) -> ValuationResult: + if self.left == self.right: + subsidy = -0.5 + exact_values = np.array([0.5] * (self.left + self.right)) + elif self.left < self.right: + subsidy = 0.0 + exact_values = np.array([1.0] * self.left + [0.0] * self.right) + else: + subsidy = 0.0 + exact_values = np.array([0.0] * self.left + [1.0] * self.right) + + result: ValuationResult[np.int_, np.int_] = ValuationResult( + algorithm="exact_least_core", + status=Status.Converged, + indices=self.data.indices, + values=exact_values, + subsidy=subsidy, + variances=np.zeros_like(self.data.x_train), + counts=np.zeros_like(self.data.x_train), + ) + return result + + def __repr__(self) -> str: + return f"{self.__class__.__name__}(L={self.left}, R={self.right})" + + +class AirportGame(Game): + """Toy game that is used for testing and demonstration purposes. + + An airport game defined in + (Castro et al., 2009)1 + Section 4.3 + + Args: + n_players: Number of players that participate in the game. + """ + + def __init__(self, n_players: int = 100) -> None: + if n_players != 100: + raise ValueError( + f"{self.__class__.__name__} only supports n_players=100 but got {n_players=}." + ) + description = "A dummy dataset for the airport game in Castro et al. 
2009" + super().__init__(n_players, score_range=(0, 100), description=description) + ranges = [ + range(0, 8), + range(8, 20), + range(20, 26), + range(26, 40), + range(40, 48), + range(48, 57), + range(57, 70), + range(70, 80), + range(80, 90), + range(90, 100), + ] + exact = [ + 0.01, + 0.020869565, + 0.033369565, + 0.046883079, + 0.063549745, + 0.082780515, + 0.106036329, + 0.139369662, + 0.189369662, + 0.289369662, + ] + c = list(range(1, 10)) + score_table = np.zeros(100) + exact_values = np.zeros(100) + + for r, v in zip(ranges, exact): + score_table[r] = c + exact_values[r] = v + + self.exact_values = exact_values + self.score_table = score_table + + def _score(self, model: SupervisedModel, X: NDArray, y: NDArray) -> float: + return max(self.score_table[X]) or 0.0 + + @lru_cache + def shapley_values(self) -> ValuationResult: + result: ValuationResult[np.int_, np.int_] = ValuationResult( + algorithm="exact_shapley", + status=Status.Converged, + indices=self.data.indices, + values=self.exact_values, + variances=np.zeros_like(self.data.x_train), + counts=np.zeros_like(self.data.x_train), + ) + return result + + +class MinimumSpanningTreeGame(Game): + r"""Toy game that is used for testing and demonstration purposes. + + A minimum spanning tree game defined in + (Castro et al., 2009)1. + + Let $G = (N \cup \{0\},E)$ be a valued graph where $N = \{1,\dots,100\}$, + and the cost associated to an edge $(i, j)$ is: + + $${ + c_{ij} = \left\{\begin{array}{lll} + 1, & \text{ if} & i = j + 1 \text{ or } i = j - 1 \\ + & & \text{ or } (i = 1 \text{ and } j = 100) \text{ or } (i = 100 \text{ and } j = 1) \\ + 101, & \text{ if} & i = 0 \text{ or } j = 0 \\ + \infty, & \text{ otherwise} + \end{array}\right. + }$$ + + A minimum spanning tree game $(N, c)$ is a cost game, where for a given coalition + $S \subset N$, $v(S)$ is the sum of the edge cost of the minimum spanning tree, + i.e. 
$v(S)$ = Minimum Spanning Tree of the graph $G|_{S\cup\{0\}}$, + which is the partial graph restricted to the players $S$ and the source node $0$. + + Args: + n_players: Number of players that participate in the game. + """ + + def __init__(self, n_players: int = 100) -> None: + if n_players != 100: + raise ValueError( + f"{self.__class__.__name__} only supports n_players=100 but got {n_players=}." + ) + description = ( + "A dummy dataset for the minimum spanning tree game in Castro et al. 2009" + ) + super().__init__(n_players, score_range=(0, np.inf), description=description) + + graph = np.zeros(shape=(self.n_players, self.n_players)) + + for i in range(self.n_players): + for j in range(self.n_players): + if ( + i == j + 1 + or i == j - 1 + or (i == 1 and j == self.n_players - 1) + or (i == self.n_players - 1 and j == 1) + ): + graph[i, j] = 1 + elif i == 0 or j == 0: + graph[i, j] = 0 + else: + graph[i, j] = np.inf + assert np.all(graph == graph.T) + + self.graph = graph + + def _score(self, model: SupervisedModel, X: NDArray, y: NDArray) -> float: + partial_graph = sp.sparse.csr_array(self.graph[np.ix_(X, X)]) + span_tree = sp.sparse.csgraph.minimum_spanning_tree(partial_graph) + return span_tree.sum() or 0 + + @lru_cache + def shapley_values(self) -> ValuationResult: + exact_values = 2 * np.ones_like(self.data.x_train) + result: ValuationResult[np.int_, np.int_] = ValuationResult( + algorithm="exact_shapley", + status=Status.Converged, + indices=self.data.indices, + values=exact_values, + variances=np.zeros_like(self.data.x_train), + counts=np.zeros_like(self.data.x_train), + ) + return result + + +class MinerGame(Game): + r"""Toy game that is used for testing and demonstration purposes. + + Consider a group of n miners, who have discovered large bars of gold. 
+ + If two miners can carry one piece of gold, then the payoff of a + coalition $S$ is: + + $${ + v(S) = \left\{\begin{array}{lll} + \mid S \mid / 2, & \text{ if} & \mid S \mid \text{ is even} \\ + ( \mid S \mid - 1)/2, & \text{ otherwise} + \end{array}\right. + }$$ + + If there are more than two miners and there is an even number of miners, + then the core consists of the single payoff where each miner gets 1/2. + + If there is an odd number of miners, then the core is empty. + + Taken from [Wikipedia](https://en.wikipedia.org/wiki/Core_(game_theory)) + + Args: + n_players: Number of miners that participate in the game. + """ + + def __init__(self, n_players: int) -> None: + if n_players <= 2: + raise ValueError(f"n_players, {n_players}, should be > 2") + description = "Dummy data for Miner Game taken from https://en.wikipedia.org/wiki/Core_(game_theory)" + super().__init__( + n_players, + score_range=(0, n_players // 2), + description=description, + ) + + def _score(self, model: SupervisedModel, X: NDArray, y: NDArray) -> float: + n = len(X) + if n % 2 == 0: + return n / 2 + else: + return (n - 1) / 2 + + @lru_cache() + def least_core_values(self) -> ValuationResult: + if self.n_players % 2 == 0: + values = np.array([0.5] * self.n_players) + subsidy = 0.0 + else: + values = np.array( + [(self.n_players - 1) / (2 * self.n_players)] * self.n_players + ) + subsidy = (self.n_players - 1) / (2 * self.n_players) + + result: ValuationResult[np.int_, np.int_] = ValuationResult( + algorithm="exact_least_core", + status=Status.Converged, + indices=self.data.indices, + values=values, + subsidy=subsidy, + variances=np.zeros_like(self.data.x_train), + counts=np.zeros_like(self.data.x_train), + ) + return result + + def __repr__(self) -> str: + return f"{self.__class__.__name__}(n={self.n_players})" diff --git a/src/pydvl/value/least_core/__init__.py b/src/pydvl/value/least_core/__init__.py index abf34c623..6facf9396 100644 --- a/src/pydvl/value/least_core/__init__.py +++ 
b/src/pydvl/value/least_core/__init__.py @@ -3,7 +3,7 @@ This package holds all routines for the computation of Least Core data values. -Please refer to [Data valuation][computing-data-values] for an overview. +Please refer to [Data valuation][data-valuation] for an overview. In addition to the standard interface via [compute_least_core_values()][pydvl.value.least_core.compute_least_core_values], because computing the @@ -47,12 +47,13 @@ def compute_least_core_values( mode: LeastCoreMode = LeastCoreMode.MonteCarlo, non_negative_subsidy: bool = False, solver_options: Optional[dict] = None, + progress: bool = False, **kwargs, ) -> ValuationResult: """Umbrella method to compute Least Core values with any of the available algorithms. - See [Data valuation][computing-data-values] for an overview. + See [Data valuation][data-valuation] for an overview. The following algorithms are available. Note that the exact method can only work with very small datasets and is thus intended only for testing. @@ -80,20 +81,6 @@ def compute_least_core_values( !!! tip "New in version 0.5.0" """ - progress: bool = kwargs.pop("progress", False) - - # TODO: remove this before releasing version 0.7.0 - if kwargs: - warnings.warn( - DeprecationWarning( - "Passing solver options as kwargs was deprecated in 0.6.0, will " - "be removed in 0.7.0. `Use solver_options` instead." 
- ) - ) - if solver_options is None: - solver_options = kwargs - else: - solver_options.update(kwargs) if mode == LeastCoreMode.MonteCarlo: # TODO fix progress showing in remote case diff --git a/src/pydvl/value/least_core/common.py b/src/pydvl/value/least_core/common.py index 2de8e7e3a..984930217 100644 --- a/src/pydvl/value/least_core/common.py +++ b/src/pydvl/value/least_core/common.py @@ -34,7 +34,6 @@ def lc_solve_problem( algorithm: str, non_negative_subsidy: bool = False, solver_options: Optional[dict] = None, - **options, ) -> ValuationResult: """Solves a linear problem as prepared by [mclc_prepare_problem()][pydvl.value.least_core.montecarlo.mclc_prepare_problem]. @@ -55,20 +54,6 @@ def lc_solve_problem( RuntimeWarning, ) - # TODO: remove this before releasing version 0.7.0 - if options: - warnings.warn( - DeprecationWarning( - "Passing solver options as kwargs was deprecated in " - "0.6.0, will be removed in 0.7.0. `Use solver_options` " - "instead." - ) - ) - if solver_options is None: - solver_options = options - else: - solver_options.update(options) - if solver_options is None: solver_options = {} diff --git a/src/pydvl/value/least_core/montecarlo.py b/src/pydvl/value/least_core/montecarlo.py index 88dc11ded..5a7a3c883 100644 --- a/src/pydvl/value/least_core/montecarlo.py +++ b/src/pydvl/value/least_core/montecarlo.py @@ -27,7 +27,6 @@ def montecarlo_least_core( config: ParallelConfig = ParallelConfig(), non_negative_subsidy: bool = False, solver_options: Optional[dict] = None, - options: Optional[dict] = None, progress: bool = False, seed: Optional[Seed] = None, ) -> ValuationResult: @@ -60,28 +59,12 @@ def montecarlo_least_core( and to configure it. Refer to [cvxpy's documentation](https://www.cvxpy.org/tutorial/advanced/index.html#setting-solver-options) for all possible options. - options: (Deprecated) Dictionary of solver options. Use solver_options - instead. 
progress: If True, shows a tqdm progress bar seed: Either an instance of a numpy random number generator or a seed for it. Returns: Object with the data values and the least core value. """ - # TODO: remove this before releasing version 0.7.0 - if options: - warnings.warn( - DeprecationWarning( - "Passing solver options as kwargs was deprecated in " - "0.6.0, will be removed in 0.7.0. `Use solver_options` " - "instead." - ) - ) - if solver_options is None: - solver_options = options - else: - solver_options.update(options) - problem = mclc_prepare_problem( u, n_iterations, n_jobs=n_jobs, config=config, progress=progress, seed=seed ) diff --git a/src/pydvl/value/least_core/naive.py b/src/pydvl/value/least_core/naive.py index f97021678..713298234 100644 --- a/src/pydvl/value/least_core/naive.py +++ b/src/pydvl/value/least_core/naive.py @@ -20,7 +20,6 @@ def exact_least_core( *, non_negative_subsidy: bool = False, solver_options: Optional[dict] = None, - options: Optional[dict] = None, progress: bool = True, ) -> ValuationResult: r"""Computes the exact Least Core values. @@ -46,14 +45,12 @@ def exact_least_core( Args: u: Utility object with model, data, and scoring function - non_negative_subsidy: If True, the least core subsidy $e$ is constrained + non_negative_subsidy: If True, the least core subsidy $e$ is constrained to be non-negative. solver_options: Dictionary of options that will be used to select a solver and to configure it. Refer to the [cvxpy's documentation](https://www.cvxpy.org/tutorial/advanced/index.html#setting-solver-options) for all possible options. - options: (Deprecated) Dictionary of solver options. Use `solver_options` - instead. progress: If True, shows a tqdm progress bar Returns: @@ -63,20 +60,6 @@ def exact_least_core( if n > 20: # Arbitrary choice, will depend on time required, caching, etc. warnings.warn(f"Large dataset! 
Computation requires 2^{n} calls to model.fit()") - # TODO: remove this before releasing version 0.7.0 - if options: - warnings.warn( - DeprecationWarning( - "Passing solver options as kwargs was deprecated in " - "0.6.0, will " - "be removed in 0.7.0. `Use solver_options` instead." - ) - ) - if solver_options is None: - solver_options = options - else: - solver_options.update(options) - problem = lc_prepare_problem(u, progress=progress) return lc_solve_problem( problem=problem, diff --git a/src/pydvl/value/loo/__init__.py b/src/pydvl/value/loo/__init__.py index 6b9e972fc..93b927272 100644 --- a/src/pydvl/value/loo/__init__.py +++ b/src/pydvl/value/loo/__init__.py @@ -1,2 +1 @@ from .loo import * -from .naive import * diff --git a/src/pydvl/value/loo/naive.py b/src/pydvl/value/loo/naive.py deleted file mode 100644 index 82c12c730..000000000 --- a/src/pydvl/value/loo/naive.py +++ /dev/null @@ -1,19 +0,0 @@ -from deprecate import deprecated - -from pydvl.utils import Utility -from pydvl.value.result import ValuationResult - -from .loo import compute_loo - -__all__ = ["naive_loo"] - - -@deprecated( - target=compute_loo, - deprecated_in="0.7.0", - remove_in="0.8.0", - args_extra=dict(n_jobs=1), -) -def naive_loo(u: Utility, *, progress: bool = True, **kwargs) -> ValuationResult: # type: ignore - """Deprecated. 
Use [compute_loo][pydvl.value.loo.compute_loo] instead.""" - pass diff --git a/src/pydvl/value/result.py b/src/pydvl/value/result.py index 20def1390..bd6a78d10 100644 --- a/src/pydvl/value/result.py +++ b/src/pydvl/value/result.py @@ -63,7 +63,7 @@ ) import numpy as np -from deprecate import deprecated +import pandas as pd from numpy.typing import NDArray from pydvl.utils.dataset import Dataset @@ -71,11 +71,6 @@ from pydvl.utils.status import Status from pydvl.utils.types import IndexT, NameT, Seed -try: - import pandas # Try to import here for the benefit of mypy -except ImportError: - pass - __all__ = ["ValuationResult", "ValueItem"] logger = logging.getLogger(__name__) @@ -664,7 +659,7 @@ def get(self, idx: Integral) -> ValueItem: def to_dataframe( self, column: Optional[str] = None, use_names: bool = False - ) -> pandas.DataFrame: + ) -> pd.DataFrame: """Returns values as a dataframe. Args: @@ -677,13 +672,9 @@ def to_dataframe( A dataframe with two columns, one for the values, with name given as explained in `column`, and another with standard errors for approximate algorithms. The latter will be named `column+'_stderr'`. - Raises: - ImportError: If pandas is not installed """ - if not pandas: - raise ImportError("Pandas required for DataFrame export") column = column or self._algorithm - df = pandas.DataFrame( + df = pd.DataFrame( self._values[self._sort_positions], index=self._names[self._sort_positions] if use_names @@ -735,14 +726,6 @@ def from_random( return cls(**options) # type: ignore @classmethod - @deprecated( - target=True, - deprecated_in="0.6.0", - remove_in="0.8.0", - args_mapping=dict(indices=None, data_names=None, n_samples=None), - template_mgs="`%(source_name)s` is deprecated for generating zero-filled " - "results, use `ValuationResult.zeros()` instead.", - ) def empty( cls, algorithm: str = "", @@ -757,6 +740,10 @@ def empty( Args: algorithm: Name of the algorithm used to compute the values + indices: Optional sequence or array of indices. 
+ data_names: Optional sequences or array of names for the data points. + Defaults to index numbers if not set. + n_samples: Number of valuation result entries. Returns: Object with the results. diff --git a/src/pydvl/value/sampler.py b/src/pydvl/value/sampler.py index ff360d154..a63519d1d 100644 --- a/src/pydvl/value/sampler.py +++ b/src/pydvl/value/sampler.py @@ -65,7 +65,6 @@ ) import numpy as np -from deprecate import deprecated, void from numpy.typing import NDArray from pydvl.utils.numeric import powerset, random_subset, random_subset_of_size @@ -87,7 +86,7 @@ class PowersetSampler(abc.ABC, Iterable[SampleT], Generic[IndexT]): - """Samplers are custom iterables over subsets of indices. + r"""Samplers are custom iterables over subsets of indices. Calling ``iter()`` on a sampler returns an iterator over tuples of the form $(i, S)$, where $i$ is an index of interest, and $S \subset I \setminus \{i\}$ @@ -313,14 +312,6 @@ def weight(cls, n: int, subset_len: int) -> float: return float(2 ** (n - 1)) if n > 0 else 1.0 -class DeterministicCombinatorialSampler(DeterministicUniformSampler[IndexT]): - @deprecated( - target=DeterministicUniformSampler, deprecated_in="0.6.0", remove_in="0.8.0" - ) - def __init__(self, indices: NDArray[IndexT], *args, **kwargs): - void(indices, args, kwargs) - - class AntitheticSampler(StochasticSamplerMixin, PowersetSampler[IndexT]): """An iterator to perform uniform random sampling of subsets, and their complements. 
diff --git a/src/pydvl/value/semivalues.py b/src/pydvl/value/semivalues.py index 9eee1c83d..841d25213 100644 --- a/src/pydvl/value/semivalues.py +++ b/src/pydvl/value/semivalues.py @@ -94,6 +94,7 @@ from itertools import islice from typing import Iterable, List, Optional, Protocol, Tuple, Type, cast +import numpy as np import scipy as sp from deprecate import deprecated from tqdm import tqdm @@ -271,15 +272,10 @@ def compute_generic_semivalues( # Filter out samples for indices that have already converged filtered_samples = samples - if skip_converged and len(done.converged) > 0: - # TODO: cloudpickle can't pickle this on python 3.8: - # filtered_samples = filter( - # lambda t: not done.converged[t[0]], samples - # ) + if skip_converged and np.count_nonzero(done.converged) > 0: + # TODO: cloudpickle can't pickle result of `filter` on python 3.8 filtered_samples = tuple( - (idx, sample) - for idx, sample in samples - if not done.converged[idx] + filter(lambda t: not done.converged[t[0]], samples) ) if filtered_samples: @@ -527,17 +523,16 @@ def compute_semivalues( - [SemiValueMode.Shapley][pydvl.value.semivalues.SemiValueMode]: Shapley values. - - [SemiValueMode.BetaShapley][pydvl.value.semivalues.SemiValueMode.BetaShapley]: + - [SemiValueMode.BetaShapley][pydvl.value.semivalues.SemiValueMode]: Implements the Beta Shapley semi-value as introduced in (Kwon and Zou, 2022)1. Pass additional keyword arguments `alpha` and `beta` to set the parameters of the Beta distribution (both default to 1). - - [SemiValueMode.Banzhaf][SemiValueMode.Banzhaf]: Implements the Banzhaf - semi-value as introduced in (Wang and Jia, 2022)1. - - See [[data-valuation]] for an overview of valuation. - [SemiValueMode.Banzhaf][pydvl.value.semivalues.SemiValueMode]: Implements - the Banzhaf semi-value as introduced in [@wang_data_2022]. + the Banzhaf semi-value as introduced in (Wang and Jia, 2022)1. + + See [Data valuation][data-valuation] for an overview of valuation. 
Args: u: Utility object with model, data, and scoring function. diff --git a/src/pydvl/value/shapley/classwise.py b/src/pydvl/value/shapley/classwise.py index ca169a0f0..f8fb0dbee 100644 --- a/src/pydvl/value/shapley/classwise.py +++ b/src/pydvl/value/shapley/classwise.py @@ -17,8 +17,7 @@ !!! tip "Analysis of Class-wise Shapley" For a detailed analysis of the method, with comparison to other valuation - techniques, please refer to the [main - documentation](../../../../../value/classwise-shapley). + techniques, please refer to the [main documentation][class-wise-shapley]. In practice, the quantity above is estimated using Monte Carlo sampling of the powerset and the set of index permutations. This results in the estimator @@ -265,8 +264,9 @@ def compute_classwise_shapley_values( $$ where $\sigma_{:i}$ denotes the set of indices in permutation sigma before - the position where $i$ appears and $S$ is a subset of the index set of all other - labels(see [[data-valuation]] for details). + the position where $i$ appears and $S$ is a subset of the index set of all + other labels (see [the main documentation][class-wise-shapley] for + details). Args: u: Utility object containing model, data, and scoring function. The diff --git a/src/pydvl/value/shapley/common.py b/src/pydvl/value/shapley/common.py index eda884e6e..6eb07ae93 100644 --- a/src/pydvl/value/shapley/common.py +++ b/src/pydvl/value/shapley/common.py @@ -33,7 +33,7 @@ def compute_shapley_values( """Umbrella method to compute Shapley values with any of the available algorithms. - See [[data-valuation]] for an overview. + See [Data valuation][data-valuation] for an overview. The following algorithms are available. Note that the exact methods can only work with very small datasets and are thus intended only for testing. 
Some diff --git a/src/pydvl/value/shapley/gt.py b/src/pydvl/value/shapley/gt.py index 34b3b00c1..d47056825 100644 --- a/src/pydvl/value/shapley/gt.py +++ b/src/pydvl/value/shapley/gt.py @@ -10,7 +10,7 @@ of evaluations of the utility required). We recommend other Monte Carlo methods instead. -You can read more [in the documentation][computing-data-values]. +You can read more [in the documentation][data-valuation]. !!! tip "New in version 0.4.0" @@ -174,7 +174,7 @@ def group_testing_shapley( config: ParallelConfig = ParallelConfig(), progress: bool = False, seed: Optional[Seed] = None, - **options, + **options: dict, ) -> ValuationResult: """Implements group testing for approximation of Shapley values as described in (Jia, R. et al., 2019)1. diff --git a/src/pydvl/value/shapley/montecarlo.py b/src/pydvl/value/shapley/montecarlo.py index aabc2d813..c10d64df6 100644 --- a/src/pydvl/value/shapley/montecarlo.py +++ b/src/pydvl/value/shapley/montecarlo.py @@ -3,8 +3,8 @@ !!! Warning You probably want to use the common interface provided by - [compute_shapley_values()][pydvl.value.shapley.compute_shapley_values] instead of directly using - the functions in this module. + [compute_shapley_values()][pydvl.value.shapley.compute_shapley_values] + instead of directly using the functions in this module. Because exact computation of Shapley values requires $\mathcal{O}(2^n)$ re-trainings of the model, several Monte Carlo approximations are available. The @@ -16,9 +16,9 @@ Alternatively, employing another reformulation of the expression above as a sum over permutations, one has the implementation in -[permutation_montecarlo_shapley()][pydvl.value.shapley.montecarlo.permutation_montecarlo_shapley], -or using an early stopping strategy to reduce computation -[truncated_montecarlo_shapley()][pydvl.value.shapley.truncated.truncated_montecarlo_shapley]. 
+[permutation_montecarlo_shapley()][pydvl.value.shapley.montecarlo.permutation_montecarlo_shapley] +with the option to pass an early stopping strategy to reduce computation +as done in Truncated MonteCarlo Shapley (TMCS). !!! info "Also see" It is also possible to use [group_testing_shapley()][pydvl.value.shapley.gt.group_testing_shapley] @@ -50,7 +50,6 @@ from typing import Optional, Sequence, Union import numpy as np -from deprecate import deprecated from numpy.random import SeedSequence from numpy.typing import NDArray from tqdm.auto import tqdm @@ -82,17 +81,20 @@ def _permutation_montecarlo_one_step( algorithm_name: str, seed: Optional[Union[Seed, SeedSequence]] = None, ) -> ValuationResult: - """Helper function for [permutation_montecarlo_shapley()][pydvl.value.shapley.montecarlo.permutation_montecarlo_shapley]. + """Helper function for + [permutation_montecarlo_shapley()][pydvl.value.shapley.montecarlo.permutation_montecarlo_shapley]. Computes marginal utilities of each training sample in a randomly sampled permutation. + Args: u: Utility object with model, data, and scoring function truncation: A callable which decides whether to interrupt processing a permutation and set all subsequent marginals to zero. algorithm_name: For the results object. Used internally by different variants of Shapley using this subroutine - seed: Either an instance of a numpy random number generator or a seed for it. + seed: Either an instance of a numpy random number generator or a seed + for it. 
Returns: An object with the results @@ -125,14 +127,6 @@ def _permutation_montecarlo_one_step( return result -@deprecated( - target=True, - deprecated_in="0.7.0", - remove_in="0.8.0", - args_mapping=dict( - coordinator_update_period=None, worker_update_period=None, progress=None - ), -) def permutation_montecarlo_shapley( u: Utility, done: StoppingCriterion, @@ -154,12 +148,12 @@ def permutation_montecarlo_shapley( where $\sigma_{:i}$ denotes the set of indices in permutation sigma before the position where $i$ appears (see [[data-valuation]] for details). - This implements the method described in (Ghorbani and Zou, 2019)1 - with a double stopping criterion. + This implements the method described in (Ghorbani and Zou, 2019)1 with a double stopping criterion. - .. todo:: - Think of how to add Robin-Gelman or some other more principled stopping - criterion. + !!! Todo + Think of how to add Robin-Gelman or some other more principled stopping + criterion. Instead of naively implementing the expectation, we sequentially add points to coalitions from a permutation and incrementally compute marginal utilities. @@ -311,7 +305,7 @@ def combinatorial_montecarlo_shapley( This consists of randomly sampling subsets of the power set of the training indices in [u.data][pydvl.utils.utility.Utility], and computing their - marginal utilities. See [Data valuation][computing-data-values] for details. + marginal utilities. See [Data valuation][data-valuation] for details. Note that because sampling is done with replacement, the approximation is poor even for $2^{m}$ subsets with $m>n$, even though there are $2^{n-1}$ diff --git a/src/pydvl/value/shapley/naive.py b/src/pydvl/value/shapley/naive.py index 031925681..8323d5582 100644 --- a/src/pydvl/value/shapley/naive.py +++ b/src/pydvl/value/shapley/naive.py @@ -1,3 +1,16 @@ +r""" +This module implements exact Shapley values using either the combinatorial or +permutation definition. 
+ +The exact computation of $n$ values takes $\mathcal{O}(2^n)$ evaluations of the +utility and is therefore only possible for small datasets. For larger datasets, +consider using any of the approximations, such as [Monte +Carlo][pydvl.value.shapley.montecarlo], or proxy models like +[kNN][pydvl.value.shapley.knn]. + +See [Data valuation][data-valuation] for details. +""" + import math import warnings from itertools import permutations @@ -18,9 +31,10 @@ def permutation_exact_shapley(u: Utility, *, progress: bool = True) -> ValuationResult: r"""Computes the exact Shapley value using the formulation with permutations: - $$v_u(x_i) = \frac{1}{n!} \sum_{\sigma \in \Pi(n)} [u(\sigma_{i-1} \cup {i}) − u(\sigma_{i})].$$ + $$v_u(x_i) = \frac{1}{n!} \sum_{\sigma \in \Pi(n)} [u(\sigma_{i-1} + \cup {i}) − u(\sigma_{i})].$$ - See [Data valuation][computing-data-values] for details. + See [Data valuation][data-valuation] for details. When the length of the training set is > 10 this prints a warning since the computation becomes too expensive. Used mostly for internal testing and @@ -98,16 +112,17 @@ def combinatorial_exact_shapley( ) -> ValuationResult: r"""Computes the exact Shapley value using the combinatorial definition. - $$v_u(i) = \frac{1}{n} \sum_{S \subseteq N \setminus \{i\}} \binom{n-1}{ | S | }^{-1} [u(S \cup \{i\}) − u(S)].$$ + $$v_u(i) = \frac{1}{n} \sum_{S \subseteq N \setminus \{i\}} + \binom{n-1}{ | S | }^{-1} [u(S \cup \{i\}) − u(S)].$$ - See [Data valuation][computing-data-values] for details. + See [Data valuation][data-valuation] for details. !!! Note If the length of the training set is > n_jobs*20 this prints a warning - because the computation is very expensive. Used mostly for internal testing - and simple use cases. Please refer to the - [Monte Carlo][pydvl.value.shapley.montecarlo] approximations for practical - applications. + because the computation is very expensive. Used mostly for internal + testing and simple use cases. 
Please refer to the + [Monte Carlo][pydvl.value.shapley.montecarlo] approximations for + practical applications. Args: u: Utility object with model, data, and scoring function diff --git a/src/pydvl/value/shapley/owen.py b/src/pydvl/value/shapley/owen.py index 2d7cde6ba..d13ed3934 100644 --- a/src/pydvl/value/shapley/owen.py +++ b/src/pydvl/value/shapley/owen.py @@ -25,6 +25,13 @@ class OwenAlgorithm(Enum): + """Choices for the Owen sampling method. + + Attributes: + Standard: Use q ∈ [0, 1] + Antithetic: Use q ∈ [0, 0.5] and correlated samples + """ + Standard = "standard" Antithetic = "antithetic" diff --git a/src/pydvl/value/shapley/truncated.py b/src/pydvl/value/shapley/truncated.py index 43327db0b..d62f625fa 100644 --- a/src/pydvl/value/shapley/truncated.py +++ b/src/pydvl/value/shapley/truncated.py @@ -11,7 +11,6 @@ from typing import Optional, cast import numpy as np -from deprecate import deprecated from pydvl.parallel.config import ParallelConfig from pydvl.utils import Utility, running_moments @@ -24,7 +23,6 @@ "FixedTruncation", "BootstrapTruncation", "RelativeTruncation", - "truncated_montecarlo_shapley", ] @@ -186,50 +184,3 @@ def _check(self, idx: int, score: float) -> bool: def reset(self, u: Optional[Utility] = None): self.count = 0 self.variance = self.mean = 0 - - -@deprecated( - target=True, - deprecated_in="0.7.0", - remove_in="0.8.0", - args_mapping=dict(coordinator_update_period=None, worker_update_period=None), -) -def truncated_montecarlo_shapley( - u: Utility, - *, - done: StoppingCriterion, - truncation: TruncationPolicy, - config: ParallelConfig = ParallelConfig(), - n_jobs: int = 1, - coordinator_update_period: int = 10, - worker_update_period: int = 5, -) -> ValuationResult: - """ - !!! Warning - This method is deprecated and only a wrapper for - [permutation_montecarlo_shapley][pydvl.value.shapley.montecarlo.permutation_montecarlo_shapley]. - - !!! 
Todo - Think of how to add Robin-Gelman or some other more principled stopping - criterion. - - Args: - u: Utility object with model, data, and scoring function - done: Check on the results which decides when to stop sampling - permutations. - truncation: callable that decides whether to stop computing marginals - for a given permutation. - config: Object configuring parallel computation, with cluster address, - number of cpus, etc. - n_jobs: Number of permutation monte carlo jobs to run concurrently. - Returns: - Object with the data values. - """ - from pydvl.value.shapley.montecarlo import permutation_montecarlo_shapley - - return cast( - ValuationResult, - permutation_montecarlo_shapley( - u, done=done, truncation=truncation, config=config, n_jobs=n_jobs - ), - ) diff --git a/src/pydvl/value/stopping.py b/src/pydvl/value/stopping.py index 4ce4b27e8..56dad2695 100644 --- a/src/pydvl/value/stopping.py +++ b/src/pydvl/value/stopping.py @@ -125,7 +125,6 @@ from typing import Callable, Optional, Protocol, Type import numpy as np -from deprecate import deprecated, void from numpy.typing import NDArray from pydvl.utils import Status @@ -135,7 +134,6 @@ "make_criterion", "AbsoluteStandardError", "StoppingCriterion", - "StandardError", "MaxChecks", "MaxUpdates", "MinUpdates", @@ -171,9 +169,8 @@ class StoppingCriterion(abc.ABC): also supported. These boolean operations act according to the following rules: - - The results of [_check][pydvl.value.stopping.StoppingCriterion._check] are - combined with the operator. See [Status][pydvl.utils.status.Status] for - the truth tables. + - The results of `check()` are combined with the operator. See + [Status][pydvl.utils.status.Status] for the truth tables. - The results of [converged][pydvl.value.stopping.StoppingCriterion.converged] are combined with the operator (returning another boolean array). 
@@ -187,17 +184,15 @@ class StoppingCriterion(abc.ABC): # Subclassing - Subclassing this class requires implementing a - [_check][pydvl.value.stopping.StoppingCriterion._check] method that + Subclassing this class requires implementing a `check()` method that returns a [Status][pydvl.utils.status.Status] object based on a given [ValuationResult][pydvl.value.result.ValuationResult]. This method should - update the attribute [_converged][pydvl.value.stopping.StoppingCriterion._converged], - which is a boolean array indicating whether the value for each index has - converged. When this does not make sense for a particular stopping criterion, + update the attribute `_converged`, which is a boolean array indicating + whether the value for each index has converged. + When this does not make sense for a particular stopping criterion, [completion][pydvl.value.stopping.StoppingCriterion.completion] should be overridden to provide an overall completion value, since its default - implementation attempts to compute the mean of - [_converged][pydvl.value.stopping.StoppingCriterion._converged]. + implementation attempts to compute the mean of `_converged`. Args: modify_result: If `True` the status of the input @@ -235,7 +230,7 @@ def converged(self) -> NDArray[np.bool_]: for each data point. Inheriting classes must set the `_converged` attribute in their - [_check][pydvl.value.stopping.StoppingCriterion._check]. + `check()`. Returns: A boolean array indicating whether the values have converged for @@ -243,21 +238,11 @@ def converged(self) -> NDArray[np.bool_]: """ return self._converged - @property - def name(self): - log = logging.getLogger(__name__) - # This string for the benefit of deprecation searches: - # remove_in="0.8.0" - log.warning( - "The `name` attribute of `StoppingCriterion` is deprecated and will be removed in 0.8.0. 
" - ) - return getattr(self, "_name", type(self).__name__) - def __str__(self): return type(self).__name__ def __call__(self, result: ValuationResult) -> Status: - """Calls [_check][pydvl.value.stopping.StoppingCriterion._check], maybe updating the result.""" + """Calls `check()`, maybe updating the result.""" if len(result) == 0: logger.warning( "At least one iteration finished but no results where generated. " @@ -389,12 +374,6 @@ def __str__(self): return f"AbsoluteStandardError(threshold={self.threshold}, fraction={self.fraction}, burn_in={self.burn_in})" -class StandardError(AbsoluteStandardError): - @deprecated(target=AbsoluteStandardError, deprecated_in="0.6.0", remove_in="0.8.0") - def __init__(self, *args, **kwargs): - void(*args, **kwargs) - - class MaxChecks(StoppingCriterion): """Terminate as soon as the number of checks exceeds the threshold. diff --git a/tests/influence/test_influence_calculator.py b/tests/influence/test_influence_calculator.py index a4d117478..854321f8f 100644 --- a/tests/influence/test_influence_calculator.py +++ b/tests/influence/test_influence_calculator.py @@ -1,11 +1,10 @@ -import logging -import shutil import uuid import dask.array as da import numpy as np import pytest import torch +import zarr from distributed import Client from torch import nn from torch.utils.data import DataLoader, TensorDataset @@ -28,6 +27,11 @@ DirectInfluence, EkfacInfluence, ) +from pydvl.influence.torch.influence_function_model import NystroemSketchInfluence +from pydvl.influence.torch.pre_conditioner import ( + JacobiPreConditioner, + NystroemPreConditioner, +) from pydvl.influence.torch.util import ( NestedTorchCatAggregator, TorchCatAggregator, @@ -70,7 +74,10 @@ def influence_model(model_and_data, test_case, influence_factory): "influence_factory", [ lambda model, loss, train_dataLoader, hessian_reg: CgInfluence( - model, loss, hessian_reg + model, + loss, + hessian_reg, + use_block_cg=True, ).fit(train_dataLoader), lambda model, loss, 
train_dataLoader, hessian_reg: DirectInfluence( model, loss, hessian_reg @@ -80,8 +87,14 @@ def influence_model(model_and_data, test_case, influence_factory): loss, hessian_regularization=hessian_reg, ).fit(train_dataLoader), + lambda model, loss, train_dataLoader, hessian_reg: NystroemSketchInfluence( + model, + loss, + rank=5, + hessian_regularization=hessian_reg, + ).fit(train_dataLoader), ], - ids=["cg", "direct", "arnoldi"], + ids=["cg", "direct", "arnoldi", "nystroem-sketch"], ) def test_dask_influence_factors(influence_factory, test_case, model_and_data): model, loss, x_train, y_train, x_test, y_test = model_and_data @@ -275,7 +288,7 @@ def test_thread_safety_violation_error( ) -def test_sequential_calculator(model_and_data, test_case): +def test_sequential_calculator(model_and_data, test_case, mocker): model, loss, x_train, y_train, x_test, y_test = model_and_data train_dataloader = DataLoader( TensorDataset(x_train, y_train), batch_size=test_case.batch_size @@ -296,13 +309,15 @@ def test_sequential_calculator(model_and_data, test_case): seq_factors = seq_factors_lazy_array.compute(aggregator=TorchCatAggregator()) torch_factors = inf_model.influence_factors(x_test, y_test) - zarr_factors_path = str(uuid.uuid4()) + + zarr_factors_store = zarr.MemoryStore() seq_factors_from_zarr = seq_factors_lazy_array.to_zarr( - zarr_factors_path, TorchNumpyConverter(), return_stored=True + zarr_factors_store, TorchNumpyConverter(), return_stored=True ) + assert torch.allclose(seq_factors, torch_factors, atol=1e-6) - assert np.allclose(torch_factors.numpy(), seq_factors_from_zarr, atol=1e-6) - shutil.rmtree(zarr_factors_path) + assert np.allclose(seq_factors_from_zarr, torch_factors, atol=1e-6) + del zarr_factors_store torch_values_from_factors = inf_model.influences_from_factors( torch_factors, x_train, y_train, mode=test_case.mode @@ -320,24 +335,25 @@ def test_sequential_calculator(model_and_data, test_case): seq_values_from_factors = 
seq_values_from_factors_lazy_array.compute( aggregator=NestedTorchCatAggregator() ) - zarr_values_from_factors_path = str(uuid.uuid4()) + zarr_values_from_factors_store = zarr.MemoryStore() seq_values_from_factors_from_zarr = seq_values_from_factors_lazy_array.to_zarr( - zarr_values_from_factors_path, TorchNumpyConverter(), return_stored=True + zarr_values_from_factors_store, TorchNumpyConverter(), return_stored=True ) assert torch.allclose(seq_values_from_factors, torch_values_from_factors, atol=1e-6) assert np.allclose( seq_values_from_factors_from_zarr, torch_values_from_factors.numpy(), atol=1e-6 ) - shutil.rmtree(zarr_values_from_factors_path) + del zarr_values_from_factors_store seq_values_lazy_array = seq_calculator.influences( test_dataloader, train_dataloader, mode=test_case.mode ) seq_values = seq_values_lazy_array.compute(aggregator=NestedTorchCatAggregator()) - zarr_values_path = str(uuid.uuid4()) + + zarr_values_store = zarr.MemoryStore() seq_values_from_zarr = seq_values_lazy_array.to_zarr( - zarr_values_path, TorchNumpyConverter(), return_stored=True + zarr_values_store, TorchNumpyConverter(), return_stored=True ) torch_values = inf_model.influences( @@ -345,7 +361,7 @@ def test_sequential_calculator(model_and_data, test_case): ) assert torch.allclose(seq_values, torch_values, atol=1e-6) assert np.allclose(seq_values_from_zarr, torch_values.numpy(), atol=1e-6) - shutil.rmtree(zarr_values_path) + del zarr_values_store @pytest.mark.torch diff --git a/tests/influence/torch/test_functional.py b/tests/influence/torch/test_functional.py index cc596a1c7..5c7b90b50 100644 --- a/tests/influence/torch/test_functional.py +++ b/tests/influence/torch/test_functional.py @@ -14,6 +14,7 @@ from torch.utils.data import DataLoader, TensorDataset from pydvl.influence.torch.functional import ( + LowRankProductRepresentation, create_batch_loss_function, create_hvp_function, create_matrix_jacobian_product_function, @@ -21,6 +22,7 @@ 
create_per_sample_mixed_derivative_function, hessian, hvp, + randomized_nystroem_approximation, ) from pydvl.influence.torch.util import align_structure, flatten_dimensions @@ -192,3 +194,38 @@ def test_mixed_derivatives(in_features, out_features, train_set_size): torch.as_tensor(test_derivative), flat_functorch_mixed_derivatives.transpose(2, 1), ) + + +@pytest.mark.parametrize("dim,rank", [(2, 1), (10, 5), (20, 20)]) +@pytest.mark.torch +def test_randomized_nystroem_approximation(dim: int, rank: int): + # Define a symmetric positive definite matrix A + v = torch.randn(dim, rank, dtype=torch.float32) + # v = torch.tensor([2.0, 3.0], dtype=torch.float32) + + # Construct the low-rank matrix A as vv^T + A = torch.matmul(v, v.t()) + + # Define the mat_vec function for matrix A + def mat_vec(x): + return A @ x + + # Parameters + input_type = torch.float32 + mat_vec_device = torch.device("cpu") + + # Call the function under test + result = randomized_nystroem_approximation(mat_vec, dim, rank, input_type) + + # Check if the result is an instance of LowRankProductRepresentation + assert isinstance( + result, LowRankProductRepresentation + ), "Result should be an instance of LowRankProductRepresentation" + + # Reconstruct the approximation of A from the result + U, Sigma = result.projections, result.eigen_vals + A_approx = torch.matmul(U, U.t() * Sigma.unsqueeze(-1)) + # Verify that the approximation is close to the original A + assert torch.allclose( + A, A_approx, atol=1e-5, rtol=1e-3 + ), "The approximation should be close to the original matrix within a tolerance" diff --git a/tests/influence/torch/test_influence_model.py b/tests/influence/torch/test_influence_model.py index 9472ad398..0631c60fc 100644 --- a/tests/influence/torch/test_influence_model.py +++ b/tests/influence/torch/test_influence_model.py @@ -15,6 +15,12 @@ DirectInfluence, EkfacInfluence, LissaInfluence, + NystroemSketchInfluence, +) +from pydvl.influence.torch.pre_conditioner import ( + 
JacobiPreConditioner, + NystroemPreConditioner, + PreConditioner, ) from tests.influence.torch.conftest import minimal_training @@ -316,8 +322,18 @@ def direct_factors( ).fit(train_dataLoader), 1e-4, ], + [ + lambda model, loss, train_dataLoader, hessian_reg: CgInfluence( + model, + loss, + hessian_regularization=hessian_reg, + pre_conditioner=NystroemPreConditioner(10), + use_block_cg=True, + ).fit(train_dataLoader), + 1e-4, + ], ], - ids=["cg", "lissa", "direct"], + ids=["cg", "lissa", "direct", "block-cg"], ) def test_influence_linear_model( influence_factory: Callable, @@ -393,9 +409,6 @@ def upper_quantile_equivalence( @parametrize( "influence_factory", [ - lambda model, loss, train_dataLoader, hessian_reg: CgInfluence( - model, loss, hessian_regularization=hessian_reg - ).fit(train_dataLoader), lambda model, loss, train_dataLoader, hessian_reg: LissaInfluence( model, loss, @@ -404,9 +417,9 @@ def upper_quantile_equivalence( scale=10000, ).fit(train_dataLoader), ], - ids=["cg", "lissa"], + ids=["lissa"], ) -def test_influences_nn( +def test_influences_lissa( test_case: TestCase, model_and_data: Tuple[ torch.nn.Module, @@ -454,7 +467,23 @@ def test_influences_nn( assert np.allclose(approx_influences, direct_influences, rtol=1e-1) -def test_influences_arnoldi( +@pytest.mark.parametrize( + "influence_factory", + [ + lambda model, loss, hessian_reg, rank: ArnoldiInfluence( + model, + loss, + hessian_regularization=hessian_reg, + rank_estimate=rank, + precompute_grad=True, + ), + lambda model, loss, hessian_reg, rank: NystroemSketchInfluence( + model, loss, hessian_regularization=hessian_reg, rank=rank + ), + ], + ids=["arnoldi", "nystroem"], +) +def test_influences_low_rank( test_case: TestCase, model_and_data: Tuple[ torch.nn.Module, @@ -467,7 +496,10 @@ def test_influences_arnoldi( direct_influences, direct_sym_influences, direct_factors, + influence_factory, ): + atol = 1e-8 + rtol = 1e-5 model, loss, x_train, y_train, x_test, y_test = model_and_data 
num_parameters = sum(p.numel() for p in model.parameters() if p.requires_grad) @@ -476,55 +508,62 @@ def test_influences_arnoldi( TensorDataset(x_train, y_train), batch_size=test_case.batch_size ) - arnoldi_influence = ArnoldiInfluence( + influence_func_model = influence_factory( model, loss, - hessian_regularization=test_case.hessian_reg, - rank_estimate=num_parameters - 1, + test_case.hessian_reg, + num_parameters - 1, ) with pytest.raises(NotFittedException): - arnoldi_influence.influences( + influence_func_model.influences( x_test, y_test, x_train, y_train, mode=test_case.mode ) with pytest.raises(NotFittedException): - arnoldi_influence.influence_factors(x_test, y_test) + influence_func_model.influence_factors(x_test, y_test) - arnoldi_influence = arnoldi_influence.fit(train_dataloader) + influence_func_model = influence_func_model.fit(train_dataloader) - low_rank_influence = arnoldi_influence.influences( + low_rank_influence = influence_func_model.influences( x_test, y_test, x_train, y_train, mode=test_case.mode ).numpy() - sym_low_rank_influence = arnoldi_influence.influences( + sym_low_rank_influence = influence_func_model.influences( x_train, y_train, mode=test_case.mode ).numpy() - low_rank_factors = arnoldi_influence.influence_factors(x_test, y_test) + low_rank_factors = influence_func_model.influence_factors(x_test, y_test) assert np.allclose( - direct_factors, arnoldi_influence.influence_factors(x_train, y_train).numpy() + direct_factors, + influence_func_model.influence_factors(x_train, y_train).numpy(), + atol=atol, + rtol=rtol, ) if test_case.mode is InfluenceMode.Up: - low_rank_influence_transpose = arnoldi_influence.influences( + low_rank_influence_transpose = influence_func_model.influences( x_train, y_train, x_test, y_test, mode=test_case.mode ).numpy() assert np.allclose( low_rank_influence_transpose, low_rank_influence.swapaxes(0, 1) ) - low_rank_values_from_factors = arnoldi_influence.influences_from_factors( + low_rank_values_from_factors = 
influence_func_model.influences_from_factors( low_rank_factors, x_train, y_train, mode=test_case.mode ).numpy() - assert np.allclose(direct_influences, low_rank_influence) - assert np.allclose(direct_sym_influences, sym_low_rank_influence) - assert np.allclose(low_rank_influence, low_rank_values_from_factors) + assert np.allclose(direct_influences, low_rank_influence, atol=atol, rtol=rtol) + assert np.allclose( + direct_sym_influences, sym_low_rank_influence, atol=atol, rtol=rtol + ) + assert np.allclose( + low_rank_influence, low_rank_values_from_factors, atol=atol, rtol=rtol + ) with pytest.raises(ValueError): - arnoldi_influence.influences(x_test, y_test, x=x_train, mode=test_case.mode) + influence_func_model.influences(x_test, y_test, x=x_train, mode=test_case.mode) with pytest.raises(ValueError): - arnoldi_influence.influences(x_test, y_test, y=y_train, mode=test_case.mode) + influence_func_model.influences(x_test, y_test, y=y_train, mode=test_case.mode) def test_influences_ekfac( @@ -591,3 +630,78 @@ def test_influences_ekfac( assert np.allclose(ekfac_influence_values, accumulated_inf_by_layer) check_influence_correlations(direct_influences, ekfac_influence_values) check_influence_correlations(direct_sym_influences, ekfac_self_influence) + + +@pytest.mark.torch +@pytest.mark.parametrize("use_block_cg", [True, False]) +@pytest.mark.parametrize( + "pre_conditioner", + [ + JacobiPreConditioner(), + NystroemPreConditioner(rank=5), + None, + ], +) +def test_influences_cg( + test_case: TestCase, + model_and_data: Tuple[ + torch.nn.Module, + Callable[[torch.Tensor, torch.Tensor], torch.Tensor], + torch.Tensor, + torch.Tensor, + torch.Tensor, + torch.Tensor, + ], + direct_influences, + direct_factors, + use_block_cg: bool, + pre_conditioner: PreConditioner, +): + model, loss, x_train, y_train, x_test, y_test = model_and_data + + train_dataloader = DataLoader( + TensorDataset(x_train, y_train), batch_size=test_case.batch_size + ) + influence_model = CgInfluence( + 
model, + loss, + test_case.hessian_reg, + maxiter=5, + pre_conditioner=pre_conditioner, + use_block_cg=use_block_cg, + ) + influence_model = influence_model.fit(train_dataloader) + + approx_influences = influence_model.influences( + x_test, y_test, x_train, y_train, mode=test_case.mode + ).numpy() + + assert not np.any(np.isnan(approx_influences)) + + assert np.allclose(approx_influences, direct_influences, atol=1e-6, rtol=1e-4) + + if test_case.mode == InfluenceMode.Up: + assert approx_influences.shape == ( + test_case.test_data_len, + test_case.train_data_len, + ) + + if test_case.mode == InfluenceMode.Perturbation: + assert approx_influences.shape == ( + test_case.test_data_len, + test_case.train_data_len, + *test_case.input_dim, + ) + + # check that influences are not all constant + assert not np.all(approx_influences == approx_influences.item(0)) + + assert np.allclose(approx_influences, direct_influences, atol=1e-6, rtol=1e-4) + + # check that block variant returns the correct vector, if only one right hand side + # is provided + if use_block_cg: + single_influence = influence_model.influence_factors( + x_train[0].unsqueeze(0), y_train[0].unsqueeze(0) + ).numpy() + assert np.allclose(single_influence, direct_factors[0], atol=1e-6, rtol=1e-4) diff --git a/tests/influence/torch/test_pre_conditioner.py b/tests/influence/torch/test_pre_conditioner.py new file mode 100644 index 000000000..8aa05b863 --- /dev/null +++ b/tests/influence/torch/test_pre_conditioner.py @@ -0,0 +1,87 @@ +import pytest +import torch + +from pydvl.influence.torch.pre_conditioner import ( + JacobiPreConditioner, + NystroemPreConditioner, +) + + +def high_cond_diagonal_dominant_matrix(size, high_value=1e5, low_value=1e-2): + """Generates a diagonal dominant matrix with a high condition number.""" + A = torch.randn(size, size) * low_value # Small off-diagonal elements + for i in range(size): + A[i, i] = high_value if i % 2 == 0 else low_value + + return A.T @ A + + +def 
approx_low_rank_matrix(size, rank): + """Generates an approximately low-rank matrix.""" + U = torch.randn(size, rank) + return U @ U.T + 1e-1 * torch.eye(size) + + +@pytest.fixture +def high_cond_mat(): + size = 100 # Example size + return high_cond_diagonal_dominant_matrix(size) + + +@pytest.fixture +def low_rank_mat(): + size = 1000 # Example size + rank = 50 + return approx_low_rank_matrix(size, rank) + + +@pytest.mark.parametrize("num_samples_estimator", [1, 3, 5]) +def test_jacobi_preconditioner_condition_number(high_cond_mat, num_samples_estimator): + preconditioner = JacobiPreConditioner(num_samples_estimator=num_samples_estimator) + size = high_cond_mat.shape[0] + regularization = 0.1 + + # Original matrix and its condition number + A = high_cond_mat + original_cond_number = torch.linalg.cond(A + regularization * torch.eye(size)) + + preconditioner.fit( + lambda x: A @ x, size, high_cond_mat.dtype, high_cond_mat.device, regularization + ) + assert preconditioner.is_fitted + + preconditioned_A = preconditioner.solve(A + regularization * torch.eye(size)) + preconditioned_cond_number = torch.linalg.cond(preconditioned_A) + + # Assert that the condition number has decreased + assert preconditioned_cond_number < original_cond_number * 10 ** ( + -0.5 * (num_samples_estimator) + ) + + +def test_nystroem_preconditioner_condition_number(low_rank_mat): + preconditioner = NystroemPreConditioner(60) + size = low_rank_mat.shape[0] + regularization = 1e-2 + + # Original matrix and its condition number + original_cond_number = torch.linalg.cond( + low_rank_mat + regularization * torch.eye(size) + ) + + preconditioner.fit( + lambda x: low_rank_mat @ x, + low_rank_mat.shape[0], + low_rank_mat.dtype, + low_rank_mat.device, + regularization, + ) + assert preconditioner.is_fitted + + preconditioned_A = preconditioner.solve( + low_rank_mat + regularization * torch.eye(size) + ) + preconditioned_cond_number = torch.linalg.cond(preconditioned_A) + + # Assert that the condition 
number has decreased + assert preconditioned_cond_number < original_cond_number * 1e-1 diff --git a/tests/parallel/__init__.py b/tests/parallel/__init__.py new file mode 100644 index 000000000..e69de29bb diff --git a/tests/utils/conftest.py b/tests/parallel/conftest.py similarity index 88% rename from tests/utils/conftest.py rename to tests/parallel/conftest.py index 1bbb7dea0..1326fa7ca 100644 --- a/tests/utils/conftest.py +++ b/tests/parallel/conftest.py @@ -11,7 +11,8 @@ def parallel_config(request): yield ParallelConfig(backend="joblib", n_cpus_local=num_workers()) elif request.param == "ray-local": ray = pytest.importorskip("ray", reason="Ray not installed.") - yield ParallelConfig(backend="ray", n_cpus_local=num_workers()) + ray.init(num_cpus=num_workers()) + yield ParallelConfig(backend="ray") ray.shutdown() elif request.param == "ray-external": ray = pytest.importorskip("ray", reason="Ray not installed.") @@ -22,6 +23,7 @@ def parallel_config(request): cluster = Cluster( initialize_head=True, head_node_args={"num_cpus": num_workers()} ) + ray.init(cluster.address) yield ParallelConfig(backend="ray", address=cluster.address) ray.shutdown() cluster.shutdown() diff --git a/tests/utils/test_parallel.py b/tests/parallel/test_parallel.py similarity index 100% rename from tests/utils/test_parallel.py rename to tests/parallel/test_parallel.py diff --git a/tests/test_results.py b/tests/test_results.py index 4ea80cf72..0b42fb48d 100644 --- a/tests/test_results.py +++ b/tests/test_results.py @@ -4,6 +4,7 @@ import operator import pickle from copy import deepcopy +from itertools import permutations import cloudpickle import numpy as np @@ -159,6 +160,20 @@ def test_updating(): assert v.counts[1] == 2 +def test_updating_order_invariance(): + updates = [0.8, 0.9, 1.0, 1.1, 1.2] + values = [] + for permutation in permutations(updates): + v = ValuationResult.zeros(indices=np.array([0])) + for update in permutation: + v.update(0, update) + values.append(v) + + v1 = 
values[0] + for v2 in values[1:]: + np.testing.assert_almost_equal(v1.values, v2.values) + + @pytest.mark.parametrize( "serialize, deserialize", [(pickle.dumps, pickle.loads), (cloudpickle.dumps, cloudpickle.loads)], @@ -415,8 +430,3 @@ def test_empty(n): v2 = ValuationResult(values=np.arange(n)) v += v2 assert len(v2) == n - - -def test_empty_deprecation(): - with pytest.warns(DeprecationWarning): - v3 = ValuationResult.empty(indices=[1, 2, 3]) diff --git a/tests/utils/test_caching.py b/tests/utils/test_caching.py index facd3d10f..846a6dba6 100644 --- a/tests/utils/test_caching.py +++ b/tests/utils/test_caching.py @@ -9,6 +9,7 @@ from numpy.typing import NDArray from pydvl.parallel import MapReduceJob +from pydvl.parallel.config import ParallelConfig from pydvl.utils.caching import ( CachedFunc, CachedFuncConfig, @@ -18,9 +19,16 @@ ) from pydvl.utils.types import Seed +from ..conftest import num_workers + logger = logging.getLogger(__name__) +@pytest.fixture(scope="module") +def parallel_config(): + return ParallelConfig(backend="joblib", n_cpus_local=num_workers()) + + def foo(indices: NDArray[np.int_], *args, **kwargs) -> float: return float(np.sum(indices)) @@ -64,7 +72,12 @@ def cache_backend(request): yield cache_backend cache_backend.clear() elif backend == "memcached": - cache_backend = MemcachedCacheBackend() + try: + cache_backend = MemcachedCacheBackend() + except ConnectionRefusedError as e: + raise RuntimeError( + f"Could not connected to Memcached server. 
original error message: {str(e)}" + ) yield cache_backend cache_backend.clear() else: @@ -154,9 +167,15 @@ def test_single_job(cache_backend): def test_without_pymemcache(mocker): - mocker.patch("pydvl.utils.caching.memcached.PYMEMCACHE_INSTALLED", False) - with pytest.raises(ModuleNotFoundError): - MemcachedCacheBackend() + import importlib + import sys + + mocker.patch.dict("sys.modules", {"pymemcache": None}) + with pytest.raises(ModuleNotFoundError) as err: + importlib.reload(sys.modules["pydvl.utils.caching.memcached"]) + + # error message should contain the extra install expression + assert "pyDVL[memcached]" in err.value.msg def test_memcached_failed_connection(): @@ -205,12 +224,10 @@ def test_cache_ignore_args(cache_backend): def test_parallel_jobs(cache_backend, parallel_config): if not isinstance(cache_backend, MemcachedCacheBackend): pytest.skip("Only running this test with MemcachedCacheBackend") - if parallel_config.backend != "joblib": - pytest.skip("We don't have to test this with all parallel backends") # Note that we typically do NOT want to ignore run_id cached_func_config = CachedFuncConfig( - ignore_args=["job_id", "run_id"], + ignore_args=["job_id", "run_id"], time_threshold=0 ) wrapped_foo = cache_backend.wrap(foo, config=cached_func_config) @@ -296,9 +313,6 @@ def test_faster_with_repeated_training(cache_backend, worker_id: str): def test_parallel_repeated_training( cache_backend, n, atol, n_jobs, n_runs, parallel_config ): - if parallel_config.backend != "joblib": - pytest.skip("We don't have to test this with all parallel backends") - def map_func(indices: NDArray[np.int_], seed: Optional[Seed] = None) -> float: return np.sum(indices).item() + np.random.normal(scale=1) diff --git a/tests/utils/test_score.py b/tests/utils/test_score.py index 078775240..5423c48be 100644 --- a/tests/utils/test_score.py +++ b/tests/utils/test_score.py @@ -1,5 +1,7 @@ import numpy as np +import sklearn from numpy.typing import NDArray +from packaging import 
version from pydvl.utils.score import Scorer, compose_score, squashed_r2, squashed_variance @@ -24,7 +26,13 @@ def test_scorer(): """Tests the Scorer class.""" scorer = Scorer("r2") assert str(scorer) == "r2" - assert repr(scorer) == "R2 (scorer=make_scorer(r2_score))" + if version.parse(sklearn.__version__) >= version.parse("1.4.0"): + assert ( + repr(scorer) + == "R2 (scorer=make_scorer(r2_score, response_method='predict'))" + ) + else: + assert repr(scorer) == "R2 (scorer=make_scorer(r2_score))" coef = np.array([1, 2]) X = np.array([[1, 2], [3, 4]]) diff --git a/tests/value/__init__.py b/tests/value/__init__.py index 4b27711c4..19a703d2d 100644 --- a/tests/value/__init__.py +++ b/tests/value/__init__.py @@ -19,7 +19,9 @@ def check_total_value( Shapley value is supposed to fulfill the total value axiom.""" total_utility = u(u.data.indices) # We can use relative tolerances if we don't have the range of the scorer. - assert np.isclose(np.sum(values.values), total_utility, rtol=rtol, atol=atol) + np.testing.assert_allclose( + np.sum(values.values), total_utility, rtol=rtol, atol=atol + ) def check_exact( @@ -33,10 +35,14 @@ def check_exact( values.sort() exact_values.sort() - assert np.all(values.indices == exact_values.indices), "Ranks do not match" - assert np.allclose( - values.values, exact_values.values, rtol=rtol, atol=atol - ), "Values do not match" + np.testing.assert_equal(values.indices, exact_values.indices, "Ranks do not match") + np.testing.assert_allclose( + values.values, + exact_values.values, + rtol=rtol, + atol=atol, + err_msg="Values do not match", + ) def check_values( @@ -66,9 +72,9 @@ def check_values( values.sort() exact_values.sort() - assert np.allclose(values.values, exact_values.values, rtol=rtol, atol=atol) + np.testing.assert_allclose(values.values, exact_values.values, rtol=rtol, atol=atol) for name in extra_values_names: - assert np.isclose( + np.testing.assert_allclose( getattr(values, name), getattr(exact_values, name), rtol=rtol, 
atol=atol ) diff --git a/tests/value/conftest.py b/tests/value/conftest.py index 0e3c48d29..139f0f5b6 100644 --- a/tests/value/conftest.py +++ b/tests/value/conftest.py @@ -11,12 +11,35 @@ from pydvl.utils.caching import InMemoryCacheBackend from pydvl.utils.status import Status from pydvl.value import ValuationResult +from pydvl.value.games import ( + AsymmetricVotingGame, + Game, + MinerGame, + ShoesGame, + SymmetricVotingGame, +) from pydvl.value.shapley.naive import combinatorial_exact_shapley from ..conftest import num_workers from . import polynomial +@pytest.fixture(scope="module") +def test_game(request) -> Game: + name, kwargs = request.param + if name == "miner": + game = MinerGame(n_players=kwargs["n_players"]) + elif name == "shoes": + game = ShoesGame(left=kwargs["left"], right=kwargs["right"]) + elif name == "symmetric-voting": + game = SymmetricVotingGame(n_players=kwargs["n_players"]) + elif name == "asymmetric-voting": + game = AsymmetricVotingGame() + else: + raise ValueError(f"Unknown game '{name}'") + return game + + @pytest.fixture(scope="function") def polynomial_dataset(coefficients: np.ndarray): """Coefficients must be for monomials of increasing degree""" diff --git a/tests/value/least_core/conftest.py b/tests/value/least_core/conftest.py deleted file mode 100644 index 2355c443a..000000000 --- a/tests/value/least_core/conftest.py +++ /dev/null @@ -1,30 +0,0 @@ -from typing import Tuple - -import numpy as np -import pytest - -from pydvl.utils import Utility -from pydvl.utils.status import Status -from pydvl.utils.utility import GlovesGameUtility, MinerGameUtility -from pydvl.value.result import ValuationResult - - -@pytest.fixture(scope="module") -def test_utility(request) -> Tuple[Utility, ValuationResult]: - name, kwargs = request.param - if name == "miner": - u = MinerGameUtility(**kwargs) - elif name == "gloves": - u = GlovesGameUtility(**kwargs) - else: - raise ValueError(f"Unknown '{name}'") - exact_values, subsidy = 
u.exact_least_core_values() - result = ValuationResult( - algorithm="exact", - values=exact_values, - subsidy=subsidy, - variances=np.zeros_like(exact_values), - data_names=np.arange(len(exact_values)), - status=Status.Converged, - ) - return u, result diff --git a/tests/value/least_core/test_common.py b/tests/value/least_core/test_common.py index feadeb954..6add2d12a 100644 --- a/tests/value/least_core/test_common.py +++ b/tests/value/least_core/test_common.py @@ -8,29 +8,30 @@ @pytest.mark.parametrize( - "test_utility", - [("miner", {"n_miners": 5})], + "test_game", + [("miner", {"n_players": 5})], indirect=True, ) -def test_lc_solve_problems(test_utility, n_jobs, parallel_config): +def test_lc_solve_problems(test_game, n_jobs, parallel_config): """Test solving LeastCoreProblems in parallel.""" - u, exact_values = test_utility n_problems = n_jobs - problem = lc_prepare_problem(u) + problem = lc_prepare_problem(test_game.u) solutions = lc_solve_problems( [problem] * n_problems, - u, + test_game.u, algorithm="test_lc", n_jobs=n_jobs, config=parallel_config, ) assert len(solutions) == n_problems + exact_values = test_game.least_core_values() + for solution in solutions: assert solution.status == Status.Converged check_values(solution, exact_values, rtol=0.01) - check = lc_solve_problem(problem, u=u, algorithm="test_lc") + check = lc_solve_problem(problem, u=test_game.u, algorithm="test_lc") assert check.status == Status.Converged check_values(solution, check, rtol=0.01) diff --git a/tests/value/least_core/test_montecarlo.py b/tests/value/least_core/test_montecarlo.py index 38d675e0d..8b926a3bf 100644 --- a/tests/value/least_core/test_montecarlo.py +++ b/tests/value/least_core/test_montecarlo.py @@ -10,28 +10,27 @@ @pytest.mark.parametrize( - "test_utility, rtol, n_iterations", + "test_game, rtol, n_iterations", [ - (("miner", {"n_miners": 8}), 0.1, 128), - (("gloves", {"left": 10, "right": 5}), 0.2, 10000), + (("miner", {"n_players": 8}), 0.1, 128), + (("shoes", 
{"left": 10, "right": 5}), 0.2, 10000), ], - indirect=["test_utility"], + indirect=["test_game"], ) @pytest.mark.parametrize("n_jobs", [1, -1]) @pytest.mark.parametrize("non_negative_subsidy", (True, False)) def test_montecarlo_least_core( - test_utility, rtol, n_iterations, n_jobs, non_negative_subsidy, seed + test_game, rtol, n_iterations, n_jobs, non_negative_subsidy, seed ): - u, exact_values = test_utility - values = montecarlo_least_core( - u, + test_game.u, n_iterations=n_iterations, non_negative_subsidy=non_negative_subsidy, progress=False, n_jobs=n_jobs, seed=seed, ) + exact_values = test_game.least_core_values() if non_negative_subsidy: check_values(values, exact_values) # Sometimes the subsidy is negative but really close to zero diff --git a/tests/value/least_core/test_naive.py b/tests/value/least_core/test_naive.py index 28a79e381..a972e72c0 100644 --- a/tests/value/least_core/test_naive.py +++ b/tests/value/least_core/test_naive.py @@ -6,23 +6,23 @@ @pytest.mark.parametrize( - "test_utility", + "test_game", [ - ("miner", {"n_miners": 3}), - ("miner", {"n_miners": 4}), - ("gloves", {"left": 1, "right": 1}), - ("gloves", {"left": 2, "right": 1}), - ("gloves", {"left": 1, "right": 2}), + ("miner", {"n_players": 3}), + ("miner", {"n_players": 4}), + ("shoes", {"left": 1, "right": 1}), + ("shoes", {"left": 2, "right": 1}), + ("shoes", {"left": 1, "right": 2}), ], indirect=True, ) @pytest.mark.parametrize("non_negative_subsidy", (True, False)) -def test_naive_least_core(test_utility, non_negative_subsidy): - u, exact_values = test_utility +def test_naive_least_core(test_game, non_negative_subsidy): values = exact_least_core( - u, non_negative_subsidy=non_negative_subsidy, progress=False + test_game.u, non_negative_subsidy=non_negative_subsidy, progress=False ) - check_total_value(u, values) + check_total_value(test_game.u, values) + exact_values = test_game.least_core_values() if non_negative_subsidy: check_values(values, exact_values) # Sometimes the 
subsidy is negative but really close to zero diff --git a/tests/value/shapley/test_classwise.py b/tests/value/shapley/test_classwise.py index bd4f55a5d..d73e86a0b 100644 --- a/tests/value/shapley/test_classwise.py +++ b/tests/value/shapley/test_classwise.py @@ -3,7 +3,9 @@ import numpy as np import pandas as pd import pytest +import sklearn from numpy.typing import NDArray +from packaging import version from pydvl.utils import Dataset, Utility, powerset from pydvl.value import MaxChecks, ValuationResult @@ -165,7 +167,13 @@ def test_classwise_scorer_representation(): scorer = ClasswiseScorer("accuracy", initial_label=0) assert str(scorer) == "classwise accuracy" - assert repr(scorer) == "ClasswiseAccuracy (scorer=make_scorer(accuracy_score))" + if version.parse(sklearn.__version__) >= version.parse("1.4.0"): + assert ( + repr(scorer) + == "ClasswiseAccuracy (scorer=make_scorer(accuracy_score, response_method='predict'))" + ) + else: + assert repr(scorer) == "ClasswiseAccuracy (scorer=make_scorer(accuracy_score))" @pytest.mark.parametrize("n_element, left_margin, right_margin", [(101, 0.3, 0.4)]) diff --git a/tests/value/shapley/test_montecarlo.py b/tests/value/shapley/test_montecarlo.py index ef9deed1f..157ebc40f 100644 --- a/tests/value/shapley/test_montecarlo.py +++ b/tests/value/shapley/test_montecarlo.py @@ -6,7 +6,7 @@ from sklearn.linear_model import LinearRegression from pydvl.parallel.config import ParallelConfig -from pydvl.utils import Dataset, GroupedDataset, Status, Utility +from pydvl.utils import GroupedDataset, Status, Utility from pydvl.utils.numeric import num_samples_permutation_hoeffding from pydvl.utils.score import Scorer, squashed_r2 from pydvl.utils.types import Seed @@ -21,35 +21,38 @@ log = logging.getLogger(__name__) -# noinspection PyTestParametrized @pytest.mark.parametrize( - "num_samples, fun, rtol, atol, kwargs", + "test_game", [ - (12, ShapleyMode.PermutationMontecarlo, 0.1, 1e-5, {"done": MaxUpdates(10)}), - # FIXME! 
it should be enough with 2**(len(data)-1) samples + ("symmetric-voting", {"n_players": 6}), + ("shoes", {"left": 3, "right": 4}), + ], + indirect=["test_game"], +) +@pytest.mark.parametrize( + "fun, rtol, atol, kwargs", + [ + (ShapleyMode.PermutationMontecarlo, 0.2, 1e-4, dict(done=MaxUpdates(500))), ( - 8, ShapleyMode.CombinatorialMontecarlo, 0.2, 1e-4, - {"done": MaxUpdates(2**10)}, + dict(done=MaxUpdates(2**10)), ), - (12, ShapleyMode.Owen, 0.1, 1e-4, dict(n_samples=4, max_q=200)), - (12, ShapleyMode.OwenAntithetic, 0.1, 1e-4, dict(n_samples=4, max_q=200)), + (ShapleyMode.Owen, 0.2, 1e-4, dict(n_samples=5, max_q=200)), + (ShapleyMode.OwenAntithetic, 0.1, 1e-4, dict(n_samples=5, max_q=200)), + # Because of the inaccuracy of GroupTesting, a high atol is required for the + # value 0, for which the rtol has no effect. ( - 3, ShapleyMode.GroupTesting, 0.1, - # Because of the inaccuracy of GTS, a high atol is required for the - # value 0, for which the rtol has no effect. 1e-2, dict(n_samples=int(4e4), epsilon=0.2, delta=0.01), ), ], ) -def test_analytic_montecarlo_shapley( - num_samples, - analytic_shapley, +def test_games( + test_game, parallel_config, n_jobs, fun: ShapleyMode, @@ -58,10 +61,22 @@ def test_analytic_montecarlo_shapley( kwargs: dict, seed, ): - u, exact_values = analytic_shapley + """Tests values for all methods using a toy games. + + For permutation, the rtol for each scorer is chosen + so that the number of samples selected is just above the (ε,δ) bound for ε = + rtol, δ=0.001 and the range corresponding to each score. This means that + roughly once every 1000/num_methods runs the test will fail. + + FIXME: + - We don't have a bound for Owen. 
+ NOTE: + - The variance in the combinatorial method is huge, so we need lots of + samples + """ values = compute_shapley_values( - u, + test_game.u, mode=fun, n_jobs=n_jobs, config=parallel_config, @@ -70,29 +85,31 @@ def test_analytic_montecarlo_shapley( **kwargs ) + exact_values = test_game.shapley_values() check_values(values, exact_values, rtol=rtol, atol=atol) @pytest.mark.slow @pytest.mark.parametrize( - "num_samples, fun, kwargs", + "test_game", + [ + ("symmetric-voting", {"n_players": 12}), + ], + indirect=["test_game"], +) +@pytest.mark.parametrize( + "fun, kwargs", [ # TODO Add once issue #416 is closed. - # (12, ShapleyMode.PermutationMontecarlo, {"done": MaxChecks(1)}), - ( - 12, - ShapleyMode.CombinatorialMontecarlo, - {"done": MaxChecks(4)}, - ), - (12, ShapleyMode.Owen, dict(n_samples=4, max_q=200)), - (12, ShapleyMode.OwenAntithetic, dict(n_samples=4, max_q=200)), - (4, ShapleyMode.GroupTesting, dict(n_samples=21, epsilon=0.2, delta=0.01)), + # (ShapleyMode.PermutationMontecarlo, dict(done=MaxChecks(1))), + (ShapleyMode.CombinatorialMontecarlo, dict(done=MaxChecks(4))), + (ShapleyMode.Owen, dict(n_samples=4, max_q=200)), + (ShapleyMode.OwenAntithetic, dict(n_samples=4, max_q=200)), + (ShapleyMode.GroupTesting, dict(n_samples=21, epsilon=0.2, delta=0.01)), ], ) -@pytest.mark.parametrize("num_points, num_features", [(12, 3)]) -def test_montecarlo_shapley_housing_dataset( - num_samples: int, - housing_dataset: Dataset, +def test_seed( + test_game, parallel_config: ParallelConfig, n_jobs: int, fun: ShapleyMode, @@ -102,11 +119,10 @@ def test_montecarlo_shapley_housing_dataset( ): values_1, values_2, values_3 = call_with_seeds( compute_shapley_values, - Utility(LinearRegression(), data=housing_dataset, scorer="r2"), + test_game.u, mode=fun, n_jobs=n_jobs, config=parallel_config, - progress=False, seeds=(seed, seed, seed_alt), **deepcopy(kwargs) ) @@ -115,10 +131,18 @@ def test_montecarlo_shapley_housing_dataset( np.testing.assert_equal(values_1.values, 
values_3.values) +@pytest.mark.skip( + "This test is brittle and the bound isn't sharp. " + "We should at least document the bound in the documentation." +) @pytest.mark.slow @pytest.mark.parametrize("num_samples, delta, eps", [(6, 0.1, 0.1)]) @pytest.mark.parametrize( - "fun", [ShapleyMode.PermutationMontecarlo, ShapleyMode.CombinatorialMontecarlo] + "fun", + [ + ShapleyMode.PermutationMontecarlo, + ShapleyMode.CombinatorialMontecarlo, + ], ) def test_hoeffding_bound_montecarlo( num_samples, @@ -143,62 +167,6 @@ def test_hoeffding_bound_montecarlo( check_rank_correlation(values, exact_values, threshold=0.8) -@pytest.mark.parametrize( - "a, b, num_points", [(2, 0, 21)] # training set will have 0.3 * 21 = 6 samples -) -@pytest.mark.parametrize("scorer, rtol", [(squashed_r2, 0.25)]) -@pytest.mark.parametrize( - "fun, kwargs", - [ - # FIXME: Hoeffding says 400 should be enough - (ShapleyMode.PermutationMontecarlo, dict(done=MaxUpdates(500))), - (ShapleyMode.CombinatorialMontecarlo, dict(done=MaxUpdates(2**11))), - (ShapleyMode.Owen, dict(n_samples=2, max_q=300)), - (ShapleyMode.OwenAntithetic, dict(n_samples=2, max_q=300)), - pytest.param( - ShapleyMode.GroupTesting, - dict(n_samples=int(5e4), epsilon=0.25, delta=0.1), - marks=pytest.mark.slow, - ), - ], -) -def test_linear_montecarlo_shapley( - linear_shapley, - n_jobs, - memcache_client_config, - scorer: Scorer, - rtol: float, - fun: ShapleyMode, - kwargs: dict, - seed: int, -): - """Tests values for all methods using a linear dataset. - - For permutation and truncated montecarlo, the rtol for each scorer is chosen - so that the number of samples selected is just above the (ε,δ) bound for ε = - rtol, δ=0.001 and the range corresponding to each score. This means that - roughly once every 1000/num_methods runs the test will fail. 
- - FIXME: - - For permutation, we must increase the number of samples above that what - is done for truncated, this is probably due to the averaging done by the - latter to reduce variance - - We don't have a bound for Owen. - NOTE: - - The variance in the combinatorial method is huge, so we need lots of - samples - - """ - u, exact_values = linear_shapley - - values = compute_shapley_values( - u, mode=fun, progress=False, n_jobs=n_jobs, seed=seed, **kwargs - ) - - check_values(values, exact_values, rtol=rtol) - check_total_value(u, values, rtol=rtol) # FIXME, could be more than rtol - - @pytest.mark.slow @pytest.mark.parametrize( "a, b, num_points", [(2, 0, 21)] # training set will have 0.3 * 21 ~= 6 samples diff --git a/tests/value/shapley/test_naive.py b/tests/value/shapley/test_naive.py index 45c32b1a9..98a18a626 100644 --- a/tests/value/shapley/test_naive.py +++ b/tests/value/shapley/test_naive.py @@ -15,55 +15,26 @@ log = logging.getLogger(__name__) -# noinspection PyTestParametrized @pytest.mark.parametrize( - "num_samples, fun, rtol, total_atol", + "test_game, rtol, total_atol", [ - (12, combinatorial_exact_shapley, 0.01, 1e-5), - (6, permutation_exact_shapley, 0.01, 1e-5), + (("symmetric-voting", {"n_players": 4}), 0.1, 1e-5), + (("shoes", {"left": 1, "right": 1}), 0.1, 1e-5), + (("shoes", {"left": 2, "right": 1}), 0.1, 1e-5), + (("shoes", {"left": 1, "right": 2}), 0.1, 1e-5), + (("shoes", {"left": 2, "right": 4}), 0.1, 1e-5), ], + indirect=["test_game"], ) -def test_analytic_exact_shapley(num_samples, analytic_shapley, fun, rtol, total_atol): - """Compares the combinatorial exact shapley and permutation exact shapley with - the analytic_shapley calculation for a dummy model. 
- """ - u, exact_values = analytic_shapley - values_p = fun(u, progress=False) - check_total_value(u, values_p, atol=total_atol) - check_values(values_p, exact_values, rtol=rtol) - - @pytest.mark.parametrize( - "a, b, num_points, scorer", - [ - (2, 0, 10, "r2"), - (2, 1, 10, "r2"), - (2, 1, 10, "neg_median_absolute_error"), - (2, 1, 10, "explained_variance"), - ], + "fun", + [combinatorial_exact_shapley, permutation_exact_shapley], ) -def test_linear( - linear_dataset, - memcache_client_config, - scorer, - cache_backend, - rtol=0.01, - total_atol=1e-5, -): - linear_utility = Utility( - LinearRegression(), - data=linear_dataset, - scorer=scorer, - cache_backend=cache_backend, - ) - - values_combinatorial = combinatorial_exact_shapley(linear_utility, progress=False) - check_total_value(linear_utility, values_combinatorial, atol=total_atol) - - values_permutation = permutation_exact_shapley(linear_utility, progress=False) - check_total_value(linear_utility, values_permutation, atol=total_atol) - - check_values(values_combinatorial, values_permutation, rtol=rtol) +def test_games(fun, test_game, rtol, total_atol): + values_p = fun(test_game.u) + exact_values = test_game.shapley_values() + check_total_value(test_game.u, values_p, atol=total_atol) + check_values(values_p, exact_values, rtol=rtol) @pytest.mark.parametrize( @@ -73,7 +44,6 @@ def test_linear( def test_grouped_linear( linear_dataset, num_groups, - memcache_client_config, scorer, cache_backend, rtol=0.01, @@ -112,9 +82,7 @@ def test_grouped_linear( (2, 1, 20, "r2"), ], ) -def test_linear_with_outlier( - linear_dataset, memcache_client_config, scorer, cache_backend, total_atol=1e-5 -): +def test_linear_with_outlier(linear_dataset, scorer, cache_backend, total_atol=1e-5): outlier_idx = np.random.randint(len(linear_dataset.y_train)) linear_dataset.y_train[outlier_idx] -= 100 linear_utility = Utility( @@ -173,7 +141,6 @@ def test_polynomial( def test_polynomial_with_outlier( polynomial_dataset, 
polynomial_pipeline, - memcache_client_config, scorer, cache_backend, total_atol=1e-5, diff --git a/tests/value/shapley/test_truncated.py b/tests/value/shapley/test_truncated.py index ac980ab96..7d5977216 100644 --- a/tests/value/shapley/test_truncated.py +++ b/tests/value/shapley/test_truncated.py @@ -8,7 +8,7 @@ from pydvl.utils.score import Scorer, squashed_r2 from pydvl.value import compute_shapley_values from pydvl.value.shapley import ShapleyMode -from pydvl.value.shapley.truncated import NoTruncation +from pydvl.value.shapley.truncated import FixedTruncation, NoTruncation from pydvl.value.stopping import HistoryDeviation, MaxUpdates from .. import check_total_value, check_values @@ -16,92 +16,49 @@ log = logging.getLogger(__name__) -# noinspection PyTestParametrized @pytest.mark.parametrize( - "num_samples, fun, rtol, atol, kwargs", + "test_game", [ - ( - 12, - ShapleyMode.TruncatedMontecarlo, - 0.1, - 1e-5, - dict( - done=MaxUpdates(500), - truncation=NoTruncation(), - ), - ), + ("symmetric-voting", {"n_players": 6}), + ("shoes", {"left": 3, "right": 4}), ], + indirect=["test_game"], ) -def test_tmcs_analytic_montecarlo_shapley( - num_samples, - analytic_shapley, - parallel_config, - n_jobs, - fun: ShapleyMode, - rtol: float, - atol: float, - kwargs: dict, -): - u, exact_values = analytic_shapley - - values = compute_shapley_values( - u, mode=fun, n_jobs=n_jobs, config=parallel_config, progress=False, **kwargs - ) - - check_values(values, exact_values, rtol=rtol, atol=atol) - - @pytest.mark.parametrize( - "a, b, num_points", [(2, 0, 21)] # training set will have 0.3 * 21 = 6 samples -) -@pytest.mark.parametrize("scorer, rtol", [(squashed_r2, 0.25)]) -@pytest.mark.parametrize( - "fun, kwargs", + "done, truncation_cls, truncation_kwargs", [ - ( - ShapleyMode.TruncatedMontecarlo, - dict( - done=MaxUpdates(500), - truncation=NoTruncation(), - ), - ), + (MaxUpdates(600), NoTruncation, dict()), + (MaxUpdates(600), FixedTruncation, dict(fraction=0.9)), ], ) -def 
test_tmcs_linear_montecarlo_shapley( - linear_shapley, +def test_games( + test_game, + parallel_config, n_jobs, - memcache_client_config, - scorer: Scorer, - rtol: float, - fun: ShapleyMode, - kwargs: dict, + done, + truncation_cls, + truncation_kwargs, + seed, ): - """Tests values for all methods using a linear dataset. - - For permutation and truncated montecarlo, the rtol for each scorer is chosen - so that the number of samples selected is just above the (ε,δ) bound for ε = - rtol, δ=0.001 and the range corresponding to each score. This means that - roughly once every 1000/num_methods runs the test will fail. - - FIXME: - - For permutation, we must increase the number of samples above that what - is done for truncated, this is probably due to the averaging done by the - latter to reduce variance - - We don't have a bound for Owen. - NOTE: - - The variance in the combinatorial method is huge, so we need lots of - samples - - """ - u, exact_values = linear_shapley - check_total_value(u, exact_values, rtol=rtol) + try: + truncation = truncation_cls(test_game.u, **truncation_kwargs) + except TypeError: + # The NoTruncation class's constructor doesn't take any arguments + truncation = truncation_cls(**truncation_kwargs) values = compute_shapley_values( - u, mode=fun, progress=False, n_jobs=n_jobs, **kwargs + test_game.u, + mode=ShapleyMode.TruncatedMontecarlo, + done=done, + truncation=truncation, + n_jobs=n_jobs, + config=parallel_config, + seed=seed, + progress=True, ) - check_values(values, exact_values, rtol=rtol) - check_total_value(u, values, rtol=rtol) # FIXME, could be more than rtol + exact_values = test_game.shapley_values() + check_values(values, exact_values, rtol=0.2, atol=1e-4) @pytest.mark.parametrize( diff --git a/tests/value/test_semivalues.py b/tests/value/test_semivalues.py index 50a0201b7..e33f92543 100644 --- a/tests/value/test_semivalues.py +++ b/tests/value/test_semivalues.py @@ -1,4 +1,5 @@ import math +from itertools import islice from 
typing import Type import numpy as np @@ -17,6 +18,7 @@ ) from pydvl.value.semivalues import ( SVCoefficient, + _marginal, banzhaf_coefficient, beta_coefficient, compute_generic_semivalues, @@ -28,12 +30,112 @@ from .utils import timed -@pytest.mark.parametrize("num_samples", [5]) +@pytest.mark.parametrize( + "test_game", + [ + ("shoes", {"left": 3, "right": 2}), + ], + indirect=["test_game"], +) +@pytest.mark.parametrize( + "sampler, coefficient, batch_size", + [(PermutationSampler, beta_coefficient(1, 1), 5)], +) +def test_marginal_batch_size(test_game, sampler, coefficient, batch_size, seed): + # TODO: This test is probably not needed. + # Because I added it and then realized that it doesn't do much. + # The only difference between the two calls is that for the first one + # the loop is outside and the second one the loop is inside. + sampler_it = iter(sampler(test_game.u.data.indices, seed=seed)) + samples = tuple(islice(sampler_it, batch_size)) + + marginals_single = [] + for sample in samples: + marginals_single.extend( + _marginal(test_game.u, coefficient=coefficient, samples=[sample]) + ) + + marginals_batch = _marginal(test_game.u, coefficient=coefficient, samples=samples) + + assert len(marginals_single) == len(marginals_batch) + assert set(marginals_single) == set(marginals_batch) + + +@pytest.mark.parametrize("n", [10, 100]) +@pytest.mark.parametrize( + "coefficient", + [ + beta_coefficient(1, 1), + beta_coefficient(1, 16), + beta_coefficient(4, 1), + banzhaf_coefficient, + shapley_coefficient, + ], +) +def test_coefficients(n: int, coefficient: SVCoefficient): + r"""Coefficients for semi-values must fulfill: + + $$ \sum_{i=1}^{n}\choose{n-1}{j-1}w^{(n)}(j) = 1 $$ + + Note that we depart from the usual definitions by including the factor $1/n$ + in the shapley and beta coefficients. 
+ """ + s = [math.comb(n - 1, j - 1) * coefficient(n, j - 1) for j in range(1, n + 1)] + assert np.isclose(1, np.sum(s)) + + +@pytest.mark.parametrize( + "test_game", + [ + ("symmetric-voting", {"n_players": 4}), + ("shoes", {"left": 1, "right": 1}), + ("shoes", {"left": 2, "right": 1}), + ("shoes", {"left": 1, "right": 2}), + ], + indirect=["test_game"], +) @pytest.mark.parametrize( "sampler", [ DeterministicUniformSampler, DeterministicPermutationSampler, + ], +) +@pytest.mark.parametrize("coefficient", [shapley_coefficient, beta_coefficient(1, 1)]) +def test_games_shapley_deterministic( + test_game, + parallel_config, + n_jobs, + sampler: Type[PowersetSampler], + coefficient: SVCoefficient, + seed: Seed, +): + criterion = MaxUpdates(50) + values = compute_generic_semivalues( + sampler(test_game.u.data.indices, seed=seed), + test_game.u, + coefficient, + criterion, + skip_converged=True, + n_jobs=n_jobs, + config=parallel_config, + progress=True, + ) + exact_values = test_game.shapley_values() + check_values(values, exact_values, rtol=0.1) + + +@pytest.mark.parametrize( + "test_game", + [ + ("symmetric-voting", {"n_players": 6}), + ("shoes", {"left": 3, "right": 2}), + ], + indirect=["test_game"], +) +@pytest.mark.parametrize( + "sampler", + [ UniformSampler, PermutationSampler, pytest.param(AntitheticSampler, marks=pytest.mark.slow), @@ -41,36 +143,55 @@ ], ) @pytest.mark.parametrize("coefficient", [shapley_coefficient, beta_coefficient(1, 1)]) -def test_shapley( - num_samples: int, - analytic_shapley, +def test_games_shapley( + test_game, + parallel_config, + n_jobs, sampler: Type[PowersetSampler], coefficient: SVCoefficient, - n_jobs: int, - parallel_config: ParallelConfig, seed: Seed, ): - u, exact_values = analytic_shapley - criterion = HistoryDeviation(50, 1e-3) | MaxUpdates(1000) + criterion = HistoryDeviation(50, 1e-4) | MaxUpdates(500) values = compute_generic_semivalues( - sampler(u.data.indices, seed=seed), - u, + sampler(test_game.u.data.indices, 
seed=seed), + test_game.u, coefficient, criterion, skip_converged=True, n_jobs=n_jobs, config=parallel_config, + progress=True, ) + + exact_values = test_game.shapley_values() check_values(values, exact_values, rtol=0.2) @pytest.mark.parametrize( - "num_samples,sampler,coefficient,batch_size", - [(5, PermutationSampler, beta_coefficient(1, 1), 5)], + "test_game", + [ + ("shoes", {"left": 3, "right": 2}), + ], + indirect=["test_game"], +) +@pytest.mark.parametrize( + "sampler, coefficient, batch_size", + [(PermutationSampler, beta_coefficient(1, 1), 5)], +) +@pytest.mark.parametrize( + "n_jobs", + [ + 1, + pytest.param( + 2, + marks=pytest.mark.xfail( + reason="Bad interaction between parallelization and batching" + ), + ), + ], ) def test_shapley_batch_size( - num_samples: int, - analytic_shapley, + test_game, sampler: Type[PermutationSampler], coefficient: SVCoefficient, batch_size: int, @@ -78,13 +199,12 @@ def test_shapley_batch_size( parallel_config: ParallelConfig, seed: Seed, ): - u, exact_values = analytic_shapley timed_fn = timed(compute_generic_semivalues) result_single_batch = timed_fn( - sampler(u.data.indices, seed=seed), - u, + sampler(test_game.u.data.indices, seed=seed), + test_game.u, coefficient, - done=HistoryDeviation(50, 1e-3) | MaxUpdates(1000), + done=MaxUpdates(100), skip_converged=True, n_jobs=n_jobs, batch_size=1, @@ -93,10 +213,10 @@ def test_shapley_batch_size( total_seconds_single_batch = timed_fn.execution_time result_multi_batch = timed_fn( - sampler(u.data.indices, seed=seed), - u, + sampler(test_game.u.data.indices, seed=seed), + test_game.u, coefficient, - done=HistoryDeviation(50, 1e-3) | MaxUpdates(1000), + done=MaxUpdates(100), skip_converged=True, n_jobs=n_jobs, batch_size=batch_size, @@ -141,26 +261,3 @@ def test_banzhaf( config=parallel_config, ) check_values(values, exact_values, rtol=0.2) - - -@pytest.mark.parametrize("n", [10, 100]) -@pytest.mark.parametrize( - "coefficient", - [ - beta_coefficient(1, 1), - 
beta_coefficient(1, 16), - beta_coefficient(4, 1), - banzhaf_coefficient, - shapley_coefficient, - ], -) -def test_coefficients(n: int, coefficient: SVCoefficient): - r"""Coefficients for semi-values must fulfill: - - $$ \sum_{i=1}^{n}\choose{n-1}{j-1}w^{(n)}(j) = 1 $$ - - Note that we depart from the usual definitions by including the factor $1/n$ - in the shapley and beta coefficients. - """ - s = [math.comb(n - 1, j - 1) * coefficient(n, j - 1) for j in range(1, n + 1)] - assert np.isclose(1, np.sum(s)) diff --git a/tests/value/test_stopping.py b/tests/value/test_stopping.py index 7399dc9c3..efebac9c4 100644 --- a/tests/value/test_stopping.py +++ b/tests/value/test_stopping.py @@ -62,10 +62,10 @@ def _check(self, result: ValuationResult) -> Status: assert (C() & C() & C())(v) == c assert (P() | P() | P())(v) == p - assert (C() & P()).name == "Composite StoppingCriterion: C AND P" - assert (C() | P()).name == "Composite StoppingCriterion: C OR P" - assert (~C()).name == "Composite StoppingCriterion: NOT C" - assert (~P()).name == "Composite StoppingCriterion: NOT P" + assert str(C() & P()) == "Composite StoppingCriterion: C AND P" + assert str(C() | P()) == "Composite StoppingCriterion: C OR P" + assert str(~C()) == "Composite StoppingCriterion: NOT C" + assert str(~P()) == "Composite StoppingCriterion: NOT P" def test_make_criterion(): @@ -88,9 +88,9 @@ def always_failed(result: ValuationResult) -> Status: assert P()(v) == Status.Pending assert F()(v) == Status.Failed - assert C().name == "always_converged" - assert P().name == "always_pending" - assert F().name == "always_failed" + assert str(C()) == "always_converged" + assert str(P()) == "always_pending" + assert str(F()) == "always_failed" assert (~C())(v) == Status.Failed assert (~P())(v) == Status.Converged @@ -104,7 +104,7 @@ def always_failed(result: ValuationResult) -> Status: def test_minmax_updates(): maxstop = MaxUpdates(10) - assert maxstop.name == "MaxUpdates" + assert str(maxstop) == 
"MaxUpdates(n_updates=10)" v = ValuationResult.from_random(5) v._counts = np.zeros(5) assert maxstop(v) == Status.Pending @@ -114,7 +114,7 @@ def test_minmax_updates(): assert maxstop(v) == Status.Converged minstop = MinUpdates(10) - assert minstop.name == "MinUpdates" + assert str(minstop) == "MinUpdates(n_updates=10)" v._counts = np.zeros(5) assert minstop(v) == Status.Pending v._counts += np.ones(5) * 9 diff --git a/tox.ini b/tox.ini index 666a5760c..d62cfe481 100644 --- a/tox.ini +++ b/tox.ini @@ -9,16 +9,13 @@ deps = extras = ray influence + memcached setenv = COVERAGE_FILE = {env:COVERAGE_FILE:{toxinidir}/.coverage.{envname}} passenv = CI [testenv:tests] -extras = - ray - influence - memcached commands = pytest -n auto --dist worksteal --cov "{envsitepackagesdir}/pydvl" {posargs} @@ -27,7 +24,7 @@ deps = {[testenv]deps} -r requirements-notebooks.txt commands = - pytest --nbmake -n 0 --cov "{envsitepackagesdir}/pydvl" notebooks/ {posargs} + pytest --nbmake --nbmake-timeout=30 -n 0 --cov "{envsitepackagesdir}/pydvl" notebooks/ {posargs} [testenv:linting] skip_install = true