diff --git a/.github/workflows/doc_build.yml b/.github/workflows/doc_build.yml
index 3bb880877..bb3582bf6 100644
--- a/.github/workflows/doc_build.yml
+++ b/.github/workflows/doc_build.yml
@@ -4,6 +4,7 @@ on:
   push:
     branches:
       - staging
+      - separate_doc_branch
 jobs:
   doc_build:
     runs-on: ubuntu-latest
@@ -24,12 +25,13 @@ jobs:
         pip install --no-cache-dir -e .
         pip install 'Sphinx==4.1.2' 'sphinx-autoapi==1.8.4' 'sphinx-autodoc-typehints==1.12.0' 'sphinx-code-include==1.1.1' 'sphinx-rtd-theme==0.5.2' 'sphinxcontrib-applehelp==1.0.2' 'sphinxcontrib-devhelp==1.0.2' 'sphinxcontrib-htmlhelp==2.0.0' 'sphinxcontrib-jsmath==1.0.1' 'sphinxcontrib-napoleon==0.7' 'sphinxcontrib-qthelp==1.0.3' 'sphinxcontrib-serializinghtml==1.1.5' autoapi nbsphinx myst_parser pandoc jupyter
         cd docssrc && make github
-        find . -iname '*.ipynb' -exec jupyter nbconvert --to notebook --inplace --execute {} \;
-    - name: deploy docs
-      uses: EndBug/add-and-commit@v7
-      with:
-        add: 'docs'
-        author_name: Automated Author
-        author_email: info@mindsdb.com
-        message: updating docs
+
+    - name: Deploy to another branch
+      uses: s0/git-publish-subdir-action@develop
+      env:
+        REPO: self
+        BRANCH: gh-pages # The branch name where you want to push the assets
+        FOLDER: docs # The directory where your assets are generated
+        GITHUB_TOKEN: ${{ secrets.GITHUB_TOKEN }} # GitHub will automatically add this - you don't need to bother getting a token
+        MESSAGE: "Rebuilt the docs" # The commit message
\ No newline at end of file
diff --git a/README.md b/README.md
index 2d5cfc2f4..8417212ff 100644
--- a/README.md
+++ b/README.md
@@ -97,7 +97,7 @@ predictor.learn(df)
 
 # Make the train/test splits and show predictions for a few examples
 test_df = predictor.split(predictor.preprocess(df))["test"]
-preds = predictor.predict(test).iloc[:10]
+preds = predictor.predict(test_df).iloc[:10]
 print(preds)
 ```
diff --git a/docs/.buildinfo b/docs/.buildinfo
deleted file mode 100644
index bbb1f0912..000000000
--- a/docs/.buildinfo
+++ /dev/null
@@ -1,4 +0,0 @@
-# Sphinx build info version 1
-# This file hashes the configuration used when building these files. When it is not found, a full rebuild will be done.
-config: 76e6161839572ede51e212e970f19cd7
-tags: 645f666f9bcd5a90fca523b33c5a78b7
diff --git a/docs/.nojekyll b/docs/.nojekyll
deleted file mode 100644
index e69de29bb..000000000
diff --git a/docs/CNAME b/docs/CNAME
deleted file mode 100644
index fbd6e4c29..000000000
--- a/docs/CNAME
+++ /dev/null
@@ -1 +0,0 @@
-lightwood.io
diff --git a/docs/_images/baseencoder.png b/docs/_images/baseencoder.png
deleted file mode 100644
index bf8de6349..000000000
Binary files a/docs/_images/baseencoder.png and /dev/null differ
diff --git a/docs/_images/laptop.jpeg b/docs/_images/laptop.jpeg
deleted file mode 100644
index c706d92d8..000000000
Binary files a/docs/_images/laptop.jpeg and /dev/null differ
diff --git a/docs/_images/lightwood.png b/docs/_images/lightwood.png
deleted file mode 100644
index d55f5c7fe..000000000
Binary files a/docs/_images/lightwood.png and /dev/null differ
diff --git a/docs/_images/swag.png b/docs/_images/swag.png
deleted file mode 100644
index 25e727b3c..000000000
Binary files a/docs/_images/swag.png and /dev/null differ
diff --git a/docs/_images/tutorials_custom_explainer_custom_explainer_20_0.png b/docs/_images/tutorials_custom_explainer_custom_explainer_20_0.png
deleted file mode 100644
index 6dd3fb269..000000000
Binary files a/docs/_images/tutorials_custom_explainer_custom_explainer_20_0.png and /dev/null differ
diff --git a/docs/_images/tutorials_custom_splitter_custom_splitter_21_0.png b/docs/_images/tutorials_custom_splitter_custom_splitter_21_0.png
deleted file mode 100644
index daa451a10..000000000
Binary files a/docs/_images/tutorials_custom_splitter_custom_splitter_21_0.png and /dev/null differ
diff --git a/docs/_images/tutorials_custom_splitter_custom_splitter_5_1.png b/docs/_images/tutorials_custom_splitter_custom_splitter_5_1.png
deleted file mode 100644
index aa97866f8..000000000
Binary files a/docs/_images/tutorials_custom_splitter_custom_splitter_5_1.png and /dev/null differ
diff --git a/docs/_images/tutorials_tutorial_data_analysis_tutorial_data_analysis_28_0.png b/docs/_images/tutorials_tutorial_data_analysis_tutorial_data_analysis_28_0.png
deleted file mode 100644
index cc0742fd9..000000000
Binary files a/docs/_images/tutorials_tutorial_data_analysis_tutorial_data_analysis_28_0.png and /dev/null differ
diff --git a/docs/_images/tutorials_tutorial_data_analysis_tutorial_data_analysis_28_1.png b/docs/_images/tutorials_tutorial_data_analysis_tutorial_data_analysis_28_1.png
deleted file mode 100644
index 26d1a9aff..000000000
Binary files a/docs/_images/tutorials_tutorial_data_analysis_tutorial_data_analysis_28_1.png and /dev/null differ
diff --git a/docs/_images/tutorials_tutorial_data_analysis_tutorial_data_analysis_28_2.png b/docs/_images/tutorials_tutorial_data_analysis_tutorial_data_analysis_28_2.png
deleted file mode 100644
index 9db508a0e..000000000
Binary files a/docs/_images/tutorials_tutorial_data_analysis_tutorial_data_analysis_28_2.png and /dev/null differ
diff --git a/docs/_images/tutorials_tutorial_data_analysis_tutorial_data_analysis_28_3.png b/docs/_images/tutorials_tutorial_data_analysis_tutorial_data_analysis_28_3.png
deleted file mode 100644
index a0d6c36ba..000000000
Binary files a/docs/_images/tutorials_tutorial_data_analysis_tutorial_data_analysis_28_3.png and /dev/null differ
diff --git a/docs/_images/tutorials_tutorial_data_analysis_tutorial_data_analysis_28_4.png b/docs/_images/tutorials_tutorial_data_analysis_tutorial_data_analysis_28_4.png
deleted file mode 100644
index 8b8861971..000000000
Binary files a/docs/_images/tutorials_tutorial_data_analysis_tutorial_data_analysis_28_4.png and /dev/null differ
diff --git a/docs/_images/tutorials_tutorial_data_analysis_tutorial_data_analysis_28_5.png b/docs/_images/tutorials_tutorial_data_analysis_tutorial_data_analysis_28_5.png
deleted file mode 100644
index eb935d00d..000000000
Binary files a/docs/_images/tutorials_tutorial_data_analysis_tutorial_data_analysis_28_5.png and /dev/null differ
diff --git a/docs/_images/tutorials_tutorial_data_analysis_tutorial_data_analysis_28_6.png b/docs/_images/tutorials_tutorial_data_analysis_tutorial_data_analysis_28_6.png
deleted file mode 100644
index 51a644858..000000000
Binary files a/docs/_images/tutorials_tutorial_data_analysis_tutorial_data_analysis_28_6.png and /dev/null differ
diff --git a/docs/_images/tutorials_tutorial_time_series_Tutorial_-_Training_a_time_series_predictor_17_0.png b/docs/_images/tutorials_tutorial_time_series_Tutorial_-_Training_a_time_series_predictor_17_0.png
deleted file mode 100644
index 3be4cf538..000000000
Binary files a/docs/_images/tutorials_tutorial_time_series_Tutorial_-_Training_a_time_series_predictor_17_0.png and /dev/null differ
diff --git a/docs/_modules/api/dtype.html b/docs/_modules/api/dtype.html
deleted file mode 100644
index bf1dc52ce..000000000
--- a/docs/_modules/api/dtype.html
+++ /dev/null
@@ -1,262 +0,0 @@
[Sphinx page boilerplate removed; page title: api.dtype — lightwood 1.6.1 documentation]
Source code for api.dtype

[docs]class dtype:
    """
    Definitions of all data types currently supported. Dtypes currently supported include:

    - **Numerical**: Data that should be represented in the form of a number. Currently ``integer``, ``float``, and ``quantity`` are supported.
    - **Categorical**: Data that represents a class or label and is discrete. Currently ``binary``, ``categorical``, and ``tags`` are supported.
    - **Date/Time**: Time-series data that is temporal/sequential. Currently ``date``, and ``datetime`` are supported.
    - **Text**: Data that can be considered as language information. Currently ``short_text``, and ``rich_text`` are supported. Short text has a small vocabulary (~ 100 words) and is generally a limited number of characters. Rich text is anything with greater complexity.
    - **Complex**: Data types that require custom techniques. Currently ``audio``, ``video`` and ``image`` are available, but highly experimental.
    - **Array**: Data in the form of a sequence where order must be preserved. Currently ``array`` is the supported type.
    - **Miscellaneous**: Miscellaneous data descriptors include ``empty``, an explicitly unknown value versus ``invalid``, a data type not currently supported.

    Custom data types may be implemented here as a flag for subsequent treatment and processing. You are welcome to include your own definitions, so long as they do not override the existing type names (alternatively, if you do, please edit subsequent parts of the preprocessing pipeline to correctly indicate how you want to deal with these data types).
    """ # noqa

    # Numerical type data
    integer = "integer"
    float = "float"
    quantity = "quantity"

    # Categorical type data
    binary = "binary"
    categorical = "categorical"
    tags = "tags"

    # Dates and Times (time-series)
    date = "date"
    datetime = "datetime"

    # Text
    short_text = "short_text"
    rich_text = "rich_text"

    # Complex Data types
    image = "image"
    audio = "audio"
    video = "video"

    # Series/Sequences
    array = "array"
    tsarray = 'tsarray'

    # Misc (Unk/NaNs)
    empty = "empty"
    invalid = "invalid"
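
Because each ``dtype`` member is a plain string flag, the types that ``infer_types`` assigns to columns can be compared against these constants directly. A minimal sketch, assuming a made-up DataFrame and column names:

```python
import pandas as pd
from lightwood.api import dtype
from lightwood.data import infer_types

# Made-up dataset: one numeric column, one label column
df = pd.DataFrame({"age": [23, 31, 45, 52], "label": ["a", "b", "a", "b"]})

# Second argument is the allowed percentage of invalid values per column
type_information = infer_types(df, 2)

for col, col_dtype in type_information.dtypes.items():
    if col_dtype in (dtype.integer, dtype.float, dtype.quantity):
        print(f"{col}: numerical ({col_dtype})")
    elif col_dtype in (dtype.binary, dtype.categorical, dtype.tags):
        print(f"{col}: categorical ({col_dtype})")
    else:
        print(f"{col}: {col_dtype}")
```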
\ No newline at end of file
diff --git a/docs/_modules/api/high_level.html b/docs/_modules/api/high_level.html
deleted file mode 100644
index 3ed0dba50..000000000
--- a/docs/_modules/api/high_level.html
+++ /dev/null
@@ -1,408 +0,0 @@
[Sphinx page boilerplate removed; page title: api.high_level — lightwood 1.6.1 documentation]
Source code for api.high_level

-import os
-from types import ModuleType
-from typing import Union
-import dill
-import pandas as pd
-from lightwood.api.types import DataAnalysis, JsonAI, ProblemDefinition
-from lightwood.data import statistical_analysis
-from lightwood.data import infer_types
-from lightwood.api.predictor import PredictorInterface
-from lightwood.api.json_ai import generate_json_ai
-import tempfile
-from lightwood.api.json_ai import code_from_json_ai as _code_from_json_ai
-import importlib.util
-import sys
-import random
-import string
-import gc
-import time
-from lightwood.helpers.log import log
-
-
-
[docs]def predictor_from_problem(df: pd.DataFrame, problem_definition: Union[ProblemDefinition, dict]) -> PredictorInterface: - """ - Creates a ready-to-train ``Predictor`` object from some raw data and a ``ProblemDefinition``. Do not use this if you want to edit the JsonAI first. Usually you'd want to next train this predictor by calling the ``learn`` method on the same dataframe used to create it. - - :param df: The raw data - :param problem_definition: The manual specifications for your predictive problem - - :returns: A lightwood ``Predictor`` object - """ # noqa - if not isinstance(problem_definition, ProblemDefinition): - problem_definition = ProblemDefinition.from_dict(problem_definition) - - log.info(f'Dropping features: {problem_definition.ignore_features}') - df = df.drop(columns=problem_definition.ignore_features) - - predictor_class_str = code_from_problem(df, problem_definition) - return predictor_from_code(predictor_class_str)
- - -
[docs]def json_ai_from_problem(df: pd.DataFrame, problem_definition: Union[ProblemDefinition, dict]) -> JsonAI: - """ - Creates a JsonAI from your raw data and problem definition. Usually you would use this when you want to subsequently edit the JsonAI, the easiest way to do this is to unload it to a dictionary via `to_dict`, modify it, and then create a new object from it using `lightwood.JsonAI.from_dict`. It's usually better to generate the JsonAI using this function rather than writing it from scratch. - - :param df: The raw data - :param problem_definition: The manual specifications for your predictive problem - - :returns: A ``JsonAI`` object generated based on your data and problem specifications - """ # noqa - if not isinstance(problem_definition, ProblemDefinition): - problem_definition = ProblemDefinition.from_dict(problem_definition) - - log.info(f'Dropping features: {problem_definition.ignore_features}') - df = df.drop(columns=problem_definition.ignore_features) - - type_information = infer_types(df, problem_definition.pct_invalid) - stats = statistical_analysis( - df, type_information.dtypes, type_information.identifiers, problem_definition) - json_ai = generate_json_ai( - type_information=type_information, statistical_analysis=stats, - problem_definition=problem_definition) - - return json_ai
- - -
[docs]def code_from_json_ai(json_ai: JsonAI) -> str: - """ - Autogenerates custom code based on the details you specified inside your JsonAI. - - :param json_ai: A ``JsonAI`` object - - :returns: Code (text) generate based on the ``JsonAI`` you created - """ - return _code_from_json_ai(json_ai)
- - -
[docs]def predictor_from_code(code: str) -> PredictorInterface: - """ - :param code: The ``Predictor``'s code in text form - - :returns: A lightwood ``Predictor`` object - """ - module_name = ''.join(random.choices(string.ascii_uppercase + string.digits, k=12)) - module_name += str(time.time()).replace('.', '') - predictor = _module_from_code(code, module_name).Predictor() - return predictor
- - -
[docs]def analyze_dataset(df: pd.DataFrame) -> DataAnalysis: - """ - You can use this to understand and visualize the data, it's not a part of the pipeline one would use for creating and training predictive models. - - :param df: The raw data - - :returns: An object containing insights about the data (specifically the type information and statistical analysis) - """ # noqa - - problem_definition = ProblemDefinition.from_dict({'target': str(df.columns[0])}) - - type_information = infer_types(df, problem_definition.pct_invalid) - stats = statistical_analysis( - df, type_information.dtypes, type_information.identifiers, problem_definition) - - return DataAnalysis( - type_information=type_information, - statistical_analysis=stats - )
- - -
[docs]def code_from_problem(df: pd.DataFrame, problem_definition: Union[ProblemDefinition, dict]) -> str: - """ - :param df: The raw data - :param problem_definition: The manual specifications for your predictive problem - - :returns: The text code generated based on your data and problem specifications - """ - if not isinstance(problem_definition, ProblemDefinition): - problem_definition = ProblemDefinition.from_dict(problem_definition) - - log.info(f'Dropping features: {problem_definition.ignore_features}') - df = df.drop(columns=problem_definition.ignore_features) - json_ai = json_ai_from_problem(df, problem_definition) - predictor_code = code_from_json_ai(json_ai) - return predictor_code
- - -
[docs]def predictor_from_state(state_file: str, code: str = None) -> PredictorInterface: - """ - :param state_file: The file containing the pickle resulting from calling ``save`` on a ``Predictor`` object - :param code: The ``Predictor``'s code in text form - - :returns: A lightwood ``Predictor`` object - """ - try: - module_name = None - with open(state_file, 'rb') as fp: - predictor = dill.load(fp) - except Exception as e: - module_name = str(e).lstrip("No module named '").split("'")[0] - if code is None: - raise Exception( - 'Provide code when loading a predictor from outside the scope/script it was created in!') - - if module_name is not None: - try: - del sys.modules[module_name] - except Exception: - pass - gc.collect() - _module_from_code(code, module_name) - with open(state_file, 'rb') as fp: - predictor = dill.load(fp) - - return predictor
- - -def _module_from_code(code: str, module_name: str) -> ModuleType: - """ - Create a python module (containing the generated ``Predictor`` class) from the code. This is both a python object and an associated temporary file on your filesystem - - :param code: The ``Predictor``'s code in text form - :param module_name: The name of the newly created module - - :returns: A python module object - """ # noqa - dirname = tempfile.gettempdir() - filename = os.urandom(24).hex() + str(time.time()).replace('.', '') + '.py' - path = os.path.join(dirname, filename) - if 'LIGHTWOOD_DEV_SAVE_TO' in os.environ: - path = os.environ['LIGHTWOOD_DEV_SAVE_TO'] - - with open(path, 'wb') as fp: - fp.write(code.encode('utf-8')) - spec = importlib.util.spec_from_file_location(module_name, fp.name) - temp_module = importlib.util.module_from_spec(spec) - sys.modules[module_name] = temp_module - spec.loader.exec_module(temp_module) - - return temp_module - - -
[docs]def predictor_from_json_ai(json_ai: JsonAI) -> PredictorInterface: - """ - Creates a ready-to-train ``Predictor`` object based on the details you specified inside your JsonAI. - - :param json_ai: A ``JsonAI`` object - - :returns: A lightwood ``Predictor`` object - """ # noqa - code = code_from_json_ai(json_ai) - predictor = predictor_from_code(code) - return predictor
-
\ No newline at end of file
diff --git a/docs/_modules/api/json_ai.html b/docs/_modules/api/json_ai.html
deleted file mode 100644
index 4616da6b6..000000000
--- a/docs/_modules/api/json_ai.html
+++ /dev/null
@@ -1,1375 +0,0 @@
[Sphinx page boilerplate removed; page title: api.json_ai — lightwood 1.6.1 documentation]
Source code for api.json_ai

-# TODO: _add_implicit_values unit test ensures NO changes for a fully specified file.
-from typing import Dict
-from lightwood.helpers.templating import call, inline_dict, align
-from lightwood.api import dtype
-import numpy as np
-from lightwood.api.types import (
-    JsonAI,
-    TypeInformation,
-    StatisticalAnalysis,
-    Feature,
-    Output,
-    ProblemDefinition,
-)
-import inspect
-from lightwood.helpers.log import log
-
-
-# For custom modules, we create a module loader with necessary imports below
-IMPORT_EXTERNAL_DIRS = """
-for import_dir in [os.path.expanduser('~/lightwood_modules'), '/etc/lightwood_modules']:
-    if os.path.exists(import_dir) and os.access(import_dir, os.R_OK):
-        for file_name in list(os.walk(import_dir))[0][2]:
-            if file_name[-3:] != '.py':
-                continue
-            mod_name = file_name[:-3]
-            loader = importlib.machinery.SourceFileLoader(mod_name,
-                                                          os.path.join(import_dir, file_name))
-            module = ModuleType(loader.name)
-            loader.exec_module(module)
-            sys.modules[mod_name] = module
-            exec(f'import {mod_name}')
-"""
-
-IMPORTS = """
-import lightwood
-from lightwood.analysis import *
-from lightwood.api import *
-from lightwood.data import *
-from lightwood.encoder import *
-from lightwood.ensemble import *
-from lightwood.helpers.device import *
-from lightwood.helpers.general import *
-from lightwood.helpers.log import *
-from lightwood.helpers.numeric import *
-from lightwood.helpers.parallelism import *
-from lightwood.helpers.seed import *
-from lightwood.helpers.text import *
-from lightwood.helpers.torch import *
-from lightwood.mixer import *
-import pandas as pd
-from typing import Dict, List
-import os
-from types import ModuleType
-import importlib.machinery
-import sys
-"""
-
-
-
[docs]def lookup_encoder( - col_dtype: str, - col_name: str, - is_target: bool, - problem_defintion: ProblemDefinition, - is_target_predicting_encoder: bool, - statistical_analysis: StatisticalAnalysis, -): - """ - Assign a default encoder for a given column based on its data type, and whether it is a target. Encoders intake raw (but cleaned) data and return an feature representation. This function assigns, per data type, what the featurizer should be. This function runs on each column within the dataset available for model building to assign how it should be featurized. - - Users may override to create a custom encoder to enable their own featurization process. However, in order to generate template JSON-AI, this code runs automatically. Users may edit the generated syntax and use custom approaches while model building. - - For each encoder, "args" may be passed. These args depend an encoder requires during its preparation call. - - :param col_dtype: A data-type of a column specified - :param col_name: The name of the column - :param is_target: Whether the column is the target for prediction. If true, only certain possible feature representations are allowed, particularly for complex data types. - :param problem_definition: The ``ProblemDefinition`` criteria; this populates specifics on how models and encoders may be trained. - :param is_target_predicting_encoder: - """ # noqa - - tss = problem_defintion.timeseries_settings - encoder_lookup = { - dtype.integer: "Integer.NumericEncoder", - dtype.float: "Float.NumericEncoder", - dtype.binary: "Binary.BinaryEncoder", - dtype.categorical: "Categorical.CategoricalAutoEncoder" - if statistical_analysis is None - or len(statistical_analysis.histograms[col_name]) > 100 - else "Categorical.OneHotEncoder", - dtype.tags: "Tags.MultiHotEncoder", - dtype.date: "Date.DatetimeEncoder", - dtype.datetime: "Datetime.DatetimeEncoder", - dtype.image: "Image.Img2VecEncoder", - dtype.rich_text: "Rich_Text.PretrainedLangEncoder", - dtype.short_text: "Short_Text.CategoricalAutoEncoder", - dtype.array: "Array.ArrayEncoder", - dtype.tsarray: "TimeSeries.TimeSeriesEncoder", - dtype.quantity: "Quantity.NumericEncoder", - dtype.audio: "Audio.MFCCEncoder" - } - - # If column is a target, only specific feature representations are allowed that enable supervised tasks - target_encoder_lookup_override = { - dtype.rich_text: "Rich_Text.VocabularyEncoder", - dtype.categorical: "Categorical.OneHotEncoder", - } - - # Assign a default encoder to each column. 
- encoder_dict = {"module": encoder_lookup[col_dtype], "args": {}} - - # If the column is a target, ensure that the feature representation can enable supervised tasks - if is_target: - encoder_dict["args"] = {"is_target": "True"} - - if col_dtype in target_encoder_lookup_override: - encoder_dict["module"] = target_encoder_lookup_override[col_dtype] - - if col_dtype in (dtype.categorical, dtype.binary): - if problem_defintion.unbias_target: - encoder_dict["args"][ - "target_class_distribution" - ] = "$statistical_analysis.target_class_distribution" - - if col_dtype in (dtype.integer, dtype.float, dtype.array, dtype.tsarray): - encoder_dict["args"][ - "positive_domain" - ] = "$statistical_analysis.positive_domain" - - # Time-series representations require more advanced flags - if tss.is_timeseries: - gby = tss.group_by if tss.group_by is not None else [] - if col_name in tss.order_by + tss.historical_columns: - encoder_dict["module"] = col_dtype.capitalize() + ".TimeSeriesEncoder" - encoder_dict["args"]["original_type"] = f'"{col_dtype}"' - encoder_dict["args"]["target"] = "self.target" - encoder_dict["args"]["grouped_by"] = f"{gby}" - - if is_target: - if col_dtype in [dtype.integer]: - encoder_dict["args"]["grouped_by"] = f"{gby}" - encoder_dict["module"] = "Integer.TsNumericEncoder" - if col_dtype in [dtype.float]: - encoder_dict["args"]["grouped_by"] = f"{gby}" - encoder_dict["module"] = "Float.TsNumericEncoder" - if tss.nr_predictions > 1: - encoder_dict["args"]["grouped_by"] = f"{gby}" - encoder_dict["args"]["timesteps"] = f"{tss.nr_predictions}" - encoder_dict["module"] = "TimeSeries.TsArrayNumericEncoder" - if "__mdb_ts_previous" in col_name: - encoder_dict["module"] = "Array.ArrayEncoder" - encoder_dict["args"]["original_type"] = f'"{tss.target_type}"' - encoder_dict["args"]["window"] = f"{tss.window}" - - # Set arguments for the encoder - if encoder_dict["module"] == "Rich_Text.PretrainedLangEncoder" and not is_target: - encoder_dict["args"]["output_type"] = "$dtype_dict[$target]" - - if eval(encoder_dict["module"].split(".")[1]).is_trainable_encoder: - encoder_dict["args"]["stop_after"] = "$problem_definition.seconds_per_encoder" - - if is_target_predicting_encoder: - encoder_dict["args"]["embed_mode"] = "False" - return encoder_dict
- - -
[docs]def generate_json_ai( - type_information: TypeInformation, - statistical_analysis: StatisticalAnalysis, - problem_definition: ProblemDefinition, -) -> JsonAI: - """ - Given ``TypeInformation``, ``StatisticalAnalysis``, and the ``ProblemDefinition``, generate a JSON config file with the necessary elements of the ML pipeline populated. - - :param TypeInformation: Specifies what data types each column within the dataset are - :param statistical_analysis: - :param problem_definition: Specifies details of the model training/building procedure, as defined by ``ProblemDefinition`` - - :returns: JSON-AI object with fully populated details of the ML pipeline - """ # noqaexec - exec(IMPORTS, globals()) - exec(IMPORT_EXTERNAL_DIRS, globals()) - target = problem_definition.target - input_cols = [] - for col_name, col_dtype in type_information.dtypes.items(): - if ( - col_name not in type_information.identifiers - and col_dtype not in (dtype.invalid, dtype.empty) - and col_name != target - ): - input_cols.append(col_name) - - tss = problem_definition.timeseries_settings - is_target_predicting_encoder = False - is_ts = problem_definition.timeseries_settings.is_timeseries - # Single text column classification - if ( - len(input_cols) == 1 - and type_information.dtypes[input_cols[0]] in (dtype.rich_text) - and type_information.dtypes[target] in (dtype.categorical, dtype.binary) - ): - is_target_predicting_encoder = True - - if is_target_predicting_encoder: - mixers = [ - { - "module": "Unit", - "args": { - "target_encoder": "$encoders[self.target]", - "stop_after": "$problem_definition.seconds_per_mixer", - }, - } - ] - else: - mixers = [ - { - "module": "Neural", - "args": { - "fit_on_dev": True, - "stop_after": "$problem_definition.seconds_per_mixer", - "search_hyperparameters": True, - }, - } - ] - - if not tss.is_timeseries or tss.nr_predictions == 1: - mixers.extend( - [ - { - "module": "LightGBM", - "args": { - "stop_after": "$problem_definition.seconds_per_mixer", - "fit_on_dev": True, - }, - }, - { - "module": "Regression", - "args": { - "stop_after": "$problem_definition.seconds_per_mixer", - }, - }, - ] - ) - elif tss.nr_predictions > 1: - mixers.extend( - [ - { - "module": "LightGBMArray", - "args": { - "fit_on_dev": True, - "stop_after": "$problem_definition.seconds_per_mixer", - "n_ts_predictions": "$problem_definition.timeseries_settings.nr_predictions", - }, - } - ] - ) - - if tss.use_previous_target: - mixers.extend( - [ - { - "module": "SkTime", - "args": { - "stop_after": "$problem_definition.seconds_per_mixer", - "n_ts_predictions": "$problem_definition.timeseries_settings.nr_predictions", - }, - } - ] - ) - - outputs = { - target: Output( - data_dtype=type_information.dtypes[target], - encoder=None, - mixers=mixers, - ensemble={ - "module": "BestOf", - "args": { - "args": "$pred_args", - "accuracy_functions": "$accuracy_functions", - "ts_analysis": "self.ts_analysis" if is_ts else None, - }, - }, - ) - } - - if tss.is_timeseries and tss.nr_predictions > 1: - list(outputs.values())[0].data_dtype = dtype.tsarray - - list(outputs.values())[0].encoder = lookup_encoder( - type_information.dtypes[target], - target, - True, - problem_definition, - False, - statistical_analysis, - ) - - features: Dict[str, Feature] = {} - for col_name in input_cols: - col_dtype = type_information.dtypes[col_name] - dependency = [] - encoder = lookup_encoder( - col_dtype, - col_name, - False, - problem_definition, - is_target_predicting_encoder, - statistical_analysis, - ) - - if ( - tss.is_timeseries - 
and eval(encoder["module"].split(".")[1]).is_timeseries_encoder - ): - if tss.group_by is not None: - for group in tss.group_by: - dependency.append(group) - - if tss.use_previous_target: - dependency.append(f"__mdb_ts_previous_{target}") - - if len(dependency) > 0: - feature = Feature( - encoder=encoder, dependency=dependency, data_dtype=col_dtype - ) - else: - feature = Feature(encoder=encoder, data_dtype=col_dtype) - features[col_name] = feature - - # Decide on the accuracy functions to use - output_dtype = list(outputs.values())[0].data_dtype - if output_dtype in [ - dtype.integer, - dtype.float, - dtype.date, - dtype.datetime, - dtype.quantity, - ]: - accuracy_functions = ["r2_score"] - elif output_dtype in [dtype.categorical, dtype.tags, dtype.binary]: - accuracy_functions = ["balanced_accuracy_score"] - elif output_dtype in (dtype.array, dtype.tsarray): - accuracy_functions = ["evaluate_array_accuracy"] - else: - raise Exception( - f"Please specify a custom accuracy function for output type {output_dtype}" - ) - - # special dispatch for t+1 time series forecasters - if is_ts: - if list(outputs.values())[0].data_dtype in [dtype.integer, dtype.float]: - accuracy_functions = ["evaluate_array_accuracy"] - - if problem_definition.time_aim is None and ( - problem_definition.seconds_per_mixer is None - or problem_definition.seconds_per_encoder is None - ): - problem_definition.time_aim = ( - 1000 - + np.log(statistical_analysis.nr_rows / 10 + 1) - * np.sum( - [ - 4 - if x - in [ - dtype.rich_text, - dtype.short_text, - dtype.array, - dtype.tsarray, - dtype.video, - dtype.audio, - dtype.image, - ] - else 1 - for x in type_information.dtypes.values() - ] - ) - * 200 - ) - - if problem_definition.time_aim is not None: - nr_trainable_encoders = len( - [ - x - for x in features.values() - if eval(x.encoder["module"].split(".")[1]).is_trainable_encoder - ] - ) - nr_mixers = len(list(outputs.values())[0].mixers) - encoder_time_budget_pct = max( - 3.3 / 5, 1.5 + np.log(nr_trainable_encoders + 1) / 5 - ) - - if nr_trainable_encoders == 0: - problem_definition.seconds_per_encoder = 0 - else: - problem_definition.seconds_per_encoder = int( - problem_definition.time_aim - * (encoder_time_budget_pct / nr_trainable_encoders) - ) - problem_definition.seconds_per_mixer = int( - problem_definition.time_aim * ((1 / encoder_time_budget_pct) / nr_mixers) - ) - - return JsonAI( - cleaner=None, - splitter=None, - analyzer=None, - explainer=None, - features=features, - outputs=outputs, - problem_definition=problem_definition, - identifiers=type_information.identifiers, - timeseries_transformer=None, - timeseries_analyzer=None, - accuracy_functions=accuracy_functions, - )
- - -def _merge_implicit_values(field: dict, implicit_value: dict) -> dict: - """ - Helper function for `_populate_implicit_field`. - Takes a user-defined field along with its implicit value, and merges them together. - - :param field: JsonAI field with user-defined parameters. - :param implicit_value: implicit values for the field. - :return: original field with implicit values merged into it. - """ - exec(IMPORTS, globals()) - exec(IMPORT_EXTERNAL_DIRS, globals()) - module = eval(field["module"]) - - if inspect.isclass(module): - args = list(inspect.signature(module.__init__).parameters.keys())[1:] - else: - args = module.__code__.co_varnames - - for arg in args: - if "args" not in field: - field["args"] = implicit_value["args"] - else: - if arg not in field["args"]: - if arg in implicit_value["args"]: - field["args"][arg] = implicit_value["args"][arg] - - return field - - -def _populate_implicit_field( - json_ai: JsonAI, field_name: str, implicit_value: dict, is_timeseries: bool -) -> None: - """ - Populate the implicit field of the JsonAI, either by filling it in entirely if missing, or by introspecting the class or function and assigning default values to the args in it's signature that are in the implicit default but haven't been populated by the user - - :params: json_ai: ``JsonAI`` object that describes the ML pipeline that may not have every detail fully specified. - :params: field_name: Name of the field the implicit field in ``JsonAI`` - :params: implicit_value: The dictionary containing implicit values for the module and arg in the field - :params: is_timeseries: Whether or not this is a timeseries problem - - :returns: nothing, this method mutates the respective field of the ``JsonAI`` object it receives - """ # noqa - # These imports might be slow, in which case the only <easy> solution is to line this code - field = json_ai.__getattribute__(field_name) - if field is None: - # This if is to only populated timeseries-specific implicit fields for implicit problems - if is_timeseries or field_name not in ( - "timeseries_analyzer", - "timeseries_transformer", - ): - field = implicit_value - - # If the user specified one or more subfields in a field that's a list - # Populate them with implicit arguments form the implicit values from that subfield - elif isinstance(field, list) and isinstance(implicit_value, list): - for i in range(len(field)): - sub_field_implicit = [ - x for x in implicit_value if x["module"] == field[i]["module"] - ] - if len(sub_field_implicit) == 1: - field[i] = _merge_implicit_values(field[i], sub_field_implicit[0]) - for sub_field_implicit in implicit_value: - if ( - len([x for x in field if x["module"] == sub_field_implicit["module"]]) - == 0 - ): - field.append(sub_field_implicit) - # If the user specified the field, add implicit arguments which we didn't specify - else: - field = _merge_implicit_values(field, implicit_value) - json_ai.__setattr__(field_name, field) - - -def _add_implicit_values(json_ai: JsonAI) -> JsonAI: - """ - To enable brevity in writing, auto-generate the "unspecified/missing" details required in the ML pipeline. - - :params: json_ai: ``JsonAI`` object that describes the ML pipeline that may not have every detail fully specified. - - :returns: ``JSONAI`` object with all necessary parameters that were previously left unmentioned filled in. 
- """ - problem_definition = json_ai.problem_definition - tss = problem_definition.timeseries_settings - - # Add implicit arguments - # @TODO: Consider removing once we have a proper editor in studio - mixers = json_ai.outputs[json_ai.problem_definition.target].mixers - for i in range(len(mixers)): - if mixers[i]["module"] == "Unit": - pass - elif mixers[i]["module"] == "Neural": - mixers[i]["args"]["target_encoder"] = mixers[i]["args"].get( - "target_encoder", "$encoders[self.target]" - ) - mixers[i]["args"]["target"] = mixers[i]["args"].get("target", "$target") - mixers[i]["args"]["dtype_dict"] = mixers[i]["args"].get( - "dtype_dict", "$dtype_dict" - ) - mixers[i]["args"]["timeseries_settings"] = mixers[i]["args"].get( - "timeseries_settings", "$problem_definition.timeseries_settings" - ) - mixers[i]["args"]["net"] = mixers[i]["args"].get( - "net", - '"DefaultNet"' - if not tss.is_timeseries or not tss.use_previous_target - else '"ArNet"', - ) - - elif mixers[i]["module"] == "LightGBM": - mixers[i]["args"]["target"] = mixers[i]["args"].get("target", "$target") - mixers[i]["args"]["dtype_dict"] = mixers[i]["args"].get( - "dtype_dict", "$dtype_dict" - ) - mixers[i]["args"]["input_cols"] = mixers[i]["args"].get( - "input_cols", "$input_cols" - ) - elif mixers[i]["module"] == "Regression": - mixers[i]["args"]["target"] = mixers[i]["args"].get("target", "$target") - mixers[i]["args"]["dtype_dict"] = mixers[i]["args"].get( - "dtype_dict", "$dtype_dict" - ) - mixers[i]["args"]["target_encoder"] = mixers[i]["args"].get( - "target_encoder", "$encoders[self.target]" - ) - elif mixers[i]["module"] == "LightGBMArray": - mixers[i]["args"]["target"] = mixers[i]["args"].get("target", "$target") - mixers[i]["args"]["dtype_dict"] = mixers[i]["args"].get( - "dtype_dict", "$dtype_dict" - ) - mixers[i]["args"]["input_cols"] = mixers[i]["args"].get( - "input_cols", "$input_cols" - ) - elif mixers[i]["module"] == "SkTime": - mixers[i]["args"]["target"] = mixers[i]["args"].get("target", "$target") - mixers[i]["args"]["dtype_dict"] = mixers[i]["args"].get( - "dtype_dict", "$dtype_dict" - ) - mixers[i]["args"]["ts_analysis"] = mixers[i]["args"].get( - "ts_analysis", "$ts_analysis" - ) - - ensemble = json_ai.outputs[json_ai.problem_definition.target].ensemble - ensemble["args"]["target"] = ensemble["args"].get("target", "$target") - ensemble["args"]["data"] = ensemble["args"].get("data", "encoded_test_data") - ensemble["args"]["mixers"] = ensemble["args"].get("mixers", "$mixers") - - for name in json_ai.features: - if json_ai.features[name].dependency is None: - json_ai.features[name].dependency = [] - if json_ai.features[name].data_dtype is None: - json_ai.features[name].data_dtype = ( - json_ai.features[name].encoder["module"].split(".")[0].lower() - ) - - # Add "hidden" fields - hidden_fields = { - "cleaner": { - "module": "cleaner", - "args": { - "pct_invalid": "$problem_definition.pct_invalid", - "identifiers": "$identifiers", - "data": "data", - "dtype_dict": "$dtype_dict", - "target": "$target", - "mode": "$mode", - "timeseries_settings": "$problem_definition.timeseries_settings", - "anomaly_detection": "$problem_definition.anomaly_detection", - }, - }, - "splitter": { - "module": "splitter", - "args": { - "tss": "$problem_definition.timeseries_settings", - "data": "data", - "seed": 1, - "target": "$target", - "dtype_dict": "$dtype_dict", - "pct_train": 0.8, - "pct_dev": 0.1, - "pct_test": 0.1, - }, - }, - "analyzer": { - "module": "model_analyzer", - "args": { - "stats_info": "$statistical_analysis", - 
"ts_cfg": "$problem_definition.timeseries_settings", - "accuracy_functions": "$accuracy_functions", - "predictor": "$ensemble", - "data": "encoded_test_data", - "train_data": "encoded_train_data", - "target": "$target", - "dtype_dict": "$dtype_dict", - "analysis_blocks": "$analysis_blocks", - }, - }, - "explainer": { - "module": "explain", - "args": { - "timeseries_settings": "$problem_definition.timeseries_settings", - "positive_domain": "$statistical_analysis.positive_domain", - "anomaly_detection": "$problem_definition.anomaly_detection", - "data": "data", - "encoded_data": "encoded_data", - "predictions": "df", - "analysis": "$runtime_analyzer", - "ts_analysis": "$ts_analysis" if tss.is_timeseries else None, - "target_name": "$target", - "target_dtype": "$dtype_dict[self.target]", - "explainer_blocks": "$analysis_blocks", - "fixed_confidence": "$pred_args.fixed_confidence", - "anomaly_error_rate": "$pred_args.anomaly_error_rate", - "anomaly_cooldown": "$pred_args.anomaly_cooldown", - }, - }, - "analysis_blocks": [ - { - "module": "ICP", - "args": { - "fixed_significance": None, - "confidence_normalizer": False, - "positive_domain": "$statistical_analysis.positive_domain", - }, - }, - { - "module": "AccStats", - "args": {"deps": ["ICP"]}, - }, - ], - "timeseries_transformer": { - "module": "transform_timeseries", - "args": { - "timeseries_settings": "$problem_definition.timeseries_settings", - "data": "data", - "dtype_dict": "$dtype_dict", - "target": "$target", - "mode": "$mode", - }, - }, - "timeseries_analyzer": { - "module": "timeseries_analyzer", - "args": { - "timeseries_settings": "$problem_definition.timeseries_settings", - "data": "data", - "dtype_dict": "$dtype_dict", - "target": "$target", - }, - }, - } - - for field_name, implicit_value in hidden_fields.items(): - _populate_implicit_field(json_ai, field_name, implicit_value, tss.is_timeseries) - - return json_ai - - -
[docs]def code_from_json_ai(json_ai: JsonAI) -> str: - """ - Generates a custom ``PredictorInterface`` given the specifications from ``JsonAI`` object. - - :param json_ai: ``JsonAI`` object with fully specified parameters - - :returns: Automated syntax of the ``PredictorInterface`` object. - """ - # ----------------- # - # Fill in any missing values - json_ai = _add_implicit_values(json_ai) - - # ----------------- # - # Instantiate encoders - encoder_dict = { - json_ai.problem_definition.target: call( - list(json_ai.outputs.values())[0].encoder - ) - } - - # Instantiate Depedencies - dependency_dict = {} - dtype_dict = { - json_ai.problem_definition.target: f"""'{list(json_ai.outputs.values())[0].data_dtype}'""" - } - - # Populate features and their data-types - for col_name, feature in json_ai.features.items(): - encoder_dict[col_name] = call(feature.encoder) - dependency_dict[col_name] = feature.dependency - dtype_dict[col_name] = f"""'{feature.data_dtype}'""" - - # Populate time-series specific details - tss = json_ai.problem_definition.timeseries_settings - if tss.is_timeseries and tss.use_previous_target: - col_name = f"__mdb_ts_previous_{json_ai.problem_definition.target}" - json_ai.problem_definition.timeseries_settings.target_type = list( - json_ai.outputs.values() - )[0].data_dtype - encoder_dict[col_name] = call( - lookup_encoder( - list(json_ai.outputs.values())[0].data_dtype, - col_name, - False, - json_ai.problem_definition, - False, - None, - ) - ) - dependency_dict[col_name] = [] - dtype_dict[col_name] = f"""'{list(json_ai.outputs.values())[0].data_dtype}'""" - json_ai.features[col_name] = Feature(encoder=encoder_dict[col_name]) - - # ----------------- # - - input_cols = [x.replace("'", "\\'").replace('"', '\\"') for x in json_ai.features] - input_cols = ",".join([f"""'{name}'""" for name in input_cols]) - - # ----------------- # - # Time-series specific code blocks - # ----------------- # - - ts_transform_code = "" - ts_analyze_code = None - ts_encoder_code = "" - if json_ai.timeseries_transformer is not None: - ts_transform_code = f""" -log.info('Transforming timeseries data') -data = {call(json_ai.timeseries_transformer)} -""" - ts_analyze_code = f""" -self.ts_analysis = {call(json_ai.timeseries_analyzer)} -""" - # @TODO: set these kwargs/properties in the json ai construction (if possible) - if json_ai.timeseries_analyzer is not None: - ts_encoder_code = """ -if encoder.is_timeseries_encoder: - kwargs['ts_analysis'] = self.ts_analysis -""" - - if json_ai.problem_definition.timeseries_settings.is_timeseries: - ts_target_code = """ -if encoder.is_target: - encoder.normalizers = self.ts_analysis['target_normalizers'] - encoder.group_combinations = self.ts_analysis['group_combinations'] -""" - else: - ts_target_code = "" - - # ----------------- # - # Statistical Analysis Body - # ----------------- # - - analyze_data_body = f""" -log.info("Performing statistical analysis on data") -self.statistical_analysis = lightwood.data.statistical_analysis(data, - self.dtype_dict, - {json_ai.identifiers}, - self.problem_definition) - -# Instantiate post-training evaluation -self.analysis_blocks = [{', '.join([call(block) for block in json_ai.analysis_blocks])}] - """ - - analyze_data_body = align(analyze_data_body, 2) - - # ----------------- # - # Pre-processing Body - # ----------------- # - - clean_body = f""" -log.info('Cleaning the data') -data = {call(json_ai.cleaner)} - -# Time-series blocks -{ts_transform_code} -""" - if ts_analyze_code is not None: - clean_body += f""" -if 
self.mode != 'predict': -{align(ts_analyze_code,1)} -""" - - clean_body += '\nreturn data' - - clean_body = align(clean_body, 2) - - # ----------------- # - # Train-Test Splitter Body - # ----------------- # - - split_body = f""" -log.info("Splitting the data into train/test") -train_test_data = {call(json_ai.splitter)} - -return train_test_data - """ - - split_body = align(split_body, 2) - - # ----------------- # - # Prepare features Body - # ----------------- # - - prepare_body = f""" -self.mode = 'train' - -if self.statistical_analysis is None: - raise Exception("Please run analyze_data first") - -# Column to encoder mapping -self.encoders = {inline_dict(encoder_dict)} - -# Prepare the training + dev data -concatenated_train_dev = pd.concat([data['train'], data['dev']]) - -log.info('Preparing the encoders') - -encoder_prepping_dict = {{}} - -# Prepare encoders that do not require learned strategies -for col_name, encoder in self.encoders.items(): - if not encoder.is_trainable_encoder: - encoder_prepping_dict[col_name] = [encoder, concatenated_train_dev[col_name], 'prepare'] - log.info(f'Encoder prepping dict length of: {{len(encoder_prepping_dict)}}') - -# Setup parallelization -parallel_prepped_encoders = mut_method_call(encoder_prepping_dict) -for col_name, encoder in parallel_prepped_encoders.items(): - self.encoders[col_name] = encoder - -# Prepare the target -if self.target not in parallel_prepped_encoders: - if self.encoders[self.target].is_trainable_encoder: - self.encoders[self.target].prepare(data['train'][self.target], data['dev'][self.target]) - else: - self.encoders[self.target].prepare(pd.concat([data['train'], data['dev']])[self.target]) - -# Prepare any non-target encoders that are learned -for col_name, encoder in self.encoders.items(): - if encoder.is_trainable_encoder: - priming_data = pd.concat([data['train'], data['dev']]) - kwargs = {{}} - if self.dependencies[col_name]: - kwargs['dependency_data'] = {{}} - for col in self.dependencies[col_name]: - kwargs['dependency_data'][col] = {{ - 'original_type': self.dtype_dict[col], - 'data': priming_data[col] - }} - {align(ts_encoder_code, 3)} - - # If an encoder representation requires the target, provide priming data - if hasattr(encoder, 'uses_target'): - kwargs['encoded_target_values'] = parallel_prepped_encoders[self.target].encode(priming_data[self.target]) - - encoder.prepare(data['train'][col_name], data['dev'][col_name], **kwargs) - - {align(ts_target_code, 1)} -""" - prepare_body = align(prepare_body, 2) - - # ----------------- # - # Featurize Data Body - # ----------------- # - - feature_body = f""" -log.info('Featurizing the data') - -feature_data = {{ key: EncodedDs(self.encoders, data, self.target) for key, data in split_data.items() if key != "stratified_on"}} - -return feature_data - -""" # noqa - - feature_body = align(feature_body, 2) - - # ----------------- # - # Fit Mixer Body - # ----------------- # - - fit_body = f""" -self.mode = 'train' - -# --------------- # -# Extract data -# --------------- # -# Extract the featurized data into train/dev/test -encoded_train_data = enc_data['train'] -encoded_dev_data = enc_data['dev'] -encoded_test_data = enc_data['test'] - -log.info('Training the mixers') - -# --------------- # -# Fit Models -# --------------- # -# Assign list of mixers -self.mixers = [{', '.join([call(x) for x in list(json_ai.outputs.values())[0].mixers])}] - -# Train mixers -trained_mixers = [] -for mixer in self.mixers: - try: - mixer.fit(encoded_train_data, encoded_dev_data) - 
trained_mixers.append(mixer) - except Exception as e: - log.warning(f'Exception: {{e}} when training mixer: {{mixer}}') - if {json_ai.problem_definition.strict_mode} and mixer.stable: - raise e - -# Update mixers to trained versions -self.mixers = trained_mixers - -# --------------- # -# Create Ensembles -# --------------- # -log.info('Ensembling the mixer') -# Create an ensemble of mixers to identify best performing model -self.pred_args = PredictionArguments() -self.ensemble = {call(list(json_ai.outputs.values())[0].ensemble)} -self.supports_proba = self.ensemble.supports_proba -""" - fit_body = align(fit_body, 2) - - # ----------------- # - # Analyze Ensemble Body - # ----------------- # - - analyze_ensemble = f""" - -# --------------- # -# Extract data -# --------------- # -# Extract the featurized data into train/dev/test -encoded_train_data = enc_data['train'] -encoded_dev_data = enc_data['dev'] -encoded_test_data = enc_data['test'] - -# --------------- # -# Analyze Ensembles -# --------------- # -log.info('Analyzing the ensemble of mixers') -self.model_analysis, self.runtime_analyzer = {call(json_ai.analyzer)} -""" - analyze_ensemble = align(analyze_ensemble, 2) - - # ----------------- # - # Adjust Ensemble Body - # ----------------- # - - adjust_body = f""" -self.mode = 'train' - -# --------------- # -# Extract data -# --------------- # -# Extract the featurized data -encoded_old_data = new_data['old'] -encoded_new_data = new_data['new'] - -# --------------- # -# Adjust (Update) Mixers -# --------------- # -log.info('Updating the mixers') - -for mixer in self.mixers: - mixer.partial_fit(encoded_new_data, encoded_old_data) -""" # noqa - - adjust_body = align(adjust_body, 2) - - # ----------------- # - # Learn Body - # ----------------- # - - learn_body = f""" -self.mode = 'train' - -# Perform stats analysis -self.analyze_data(data) - -# Pre-process the data -data = self.preprocess(data) - -# Create train/test (dev) split -train_dev_test = self.split(data) - -# Prepare encoders -self.prepare(train_dev_test) - -# Create feature vectors from data -enc_train_test = self.featurize(train_dev_test) - -# Prepare mixers -self.fit(enc_train_test) - -# Analyze the ensemble -self.analyze_ensemble(enc_train_test) - -# ------------------------ # -# Enable model partial fit AFTER it is trained and evaluated for performance with the appropriate train/dev/test splits. -# This assumes the predictor could continuously evolve, hence including reserved testing data may improve predictions. -# SET `json_ai.problem_definition.fit_on_all=False` TO TURN THIS BLOCK OFF. 
- -# Update the mixers with partial fit -if self.problem_definition.fit_on_all: - - log.info("Adjustment on validation requested.") - update_data = {{"new": enc_train_test["test"], "old": ConcatedEncodedDs([enc_train_test["train"], enc_train_test["dev"]])}} # noqa - - self.adjust(update_data) - -""" - learn_body = align(learn_body, 2) - # ----------------- # - # Predict Body - # ----------------- # - - predict_body = f""" -# Remove columns that user specifies to ignore -self.mode = 'predict' -log.info(f'Dropping features: {{self.problem_definition.ignore_features}}') -data = data.drop(columns=self.problem_definition.ignore_features, errors='ignore') -for col in self.input_cols: - if col not in data.columns: - data[col] = [None] * len(data) - -# Pre-process the data -data = self.preprocess(data) - -# Featurize the data -encoded_ds = self.featurize({{"predict_data": data}})["predict_data"] -encoded_data = encoded_ds.get_encoded_data(include_target=False) - -self.pred_args = PredictionArguments.from_dict(args) -df = self.ensemble(encoded_ds, args=self.pred_args) - -if self.pred_args.all_mixers: - return df -else: - insights, global_insights = {call(json_ai.explainer)} - return insights -""" - - predict_body = align(predict_body, 2) - - predictor_code = f""" -{IMPORTS} -{IMPORT_EXTERNAL_DIRS} - -class Predictor(PredictorInterface): - target: str - mixers: List[BaseMixer] - encoders: Dict[str, BaseEncoder] - ensemble: BaseEnsemble - mode: str - - def __init__(self): - seed({json_ai.problem_definition.seed_nr}) - self.target = '{json_ai.problem_definition.target}' - self.mode = 'inactive' - self.problem_definition = ProblemDefinition.from_dict({json_ai.problem_definition.to_dict()}) - self.accuracy_functions = {json_ai.accuracy_functions} - self.identifiers = {json_ai.identifiers} - self.dtype_dict = {inline_dict(dtype_dict)} - - # Any feature-column dependencies - self.dependencies = {inline_dict(dependency_dict)} - - self.input_cols = [{input_cols}] - - # Initial stats analysis - self.statistical_analysis = None - - - def analyze_data(self, data: pd.DataFrame) -> None: - # Perform a statistical analysis on the unprocessed data -{analyze_data_body} - - def preprocess(self, data: pd.DataFrame) -> pd.DataFrame: - # Preprocess and clean data -{clean_body} - - def split(self, data: pd.DataFrame) -> Dict[str, pd.DataFrame]: - # Split the data into training/testing splits -{split_body} - - def prepare(self, data: Dict[str, pd.DataFrame]) -> None: - # Prepare encoders to featurize data -{prepare_body} - - def featurize(self, split_data: Dict[str, pd.DataFrame]): - # Featurize data into numerical representations for models -{feature_body} - - def fit(self, enc_data: Dict[str, pd.DataFrame]) -> None: - # Fit predictors to estimate target -{fit_body} - - def analyze_ensemble(self, enc_data: Dict[str, pd.DataFrame]) -> None: - # Evaluate quality of fit for the ensemble of mixers -{analyze_ensemble} - - def learn(self, data: pd.DataFrame) -> None: - log.info(f'Dropping features: {{self.problem_definition.ignore_features}}') - data = data.drop(columns=self.problem_definition.ignore_features, errors='ignore') -{learn_body} - - def adjust(self, new_data: Dict[str, pd.DataFrame]) -> None: - # Update mixers with new information -{adjust_body} - - def predict(self, data: pd.DataFrame, args: Dict = {{}}) -> pd.DataFrame: -{predict_body} -""" - - try: - import black - except Exception: - black = None - - if black is not None: - log.info('Unable to import black formatter, predictor code might be a bit ugly.') - 
predictor_code = black.format_str(predictor_code, mode=black.FileMode()) - - return predictor_code
- - -
[docs]def validate_json_ai(json_ai: JsonAI) -> bool: - """ - Checks the validity of a ``JsonAI`` object - - :param json_ai: A ``JsonAI`` object - - :returns: Whether the JsonAI is valid, i.e. doesn't contain prohibited values, unknown values and can be turned into code. - """ # noqa - from lightwood.api.high_level import predictor_from_code, code_from_json_ai - - try: - predictor_from_code(code_from_json_ai(json_ai)) - return True - except Exception: - return False
-
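
The docstrings above describe the intended loop: generate a JSON-AI spec, tweak it, regenerate the code, and validate it. A hedged sketch of that loop, with a hypothetical data path and target column and a purely illustrative mixer override:

```python
import pandas as pd
from lightwood.api.high_level import (
    json_ai_from_problem,
    code_from_json_ai,
    predictor_from_code,
)
from lightwood.api.json_ai import validate_json_ai

df = pd.read_csv("my_data.csv")   # hypothetical path
target = "my_target"              # hypothetical target column

json_ai = json_ai_from_problem(df, {"target": target})

# Mixers live under the target's output entry (see _add_implicit_values above).
# Purely illustrative override: train only the Neural mixer.
json_ai.outputs[target].mixers = [
    {"module": "Neural",
     "args": {"stop_after": "$problem_definition.seconds_per_mixer"}}
]

# validate_json_ai round-trips the edited spec through code generation
if validate_json_ai(json_ai):
    predictor = predictor_from_code(code_from_json_ai(json_ai))
```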
\ No newline at end of file
diff --git a/docs/_modules/api/predictor.html b/docs/_modules/api/predictor.html
deleted file mode 100644
index fd578fcc4..000000000
--- a/docs/_modules/api/predictor.html
+++ /dev/null
@@ -1,361 +0,0 @@
[Sphinx page boilerplate removed; page title: api.predictor — lightwood 1.6.1 documentation]
Source code for api.predictor

-import dill
-from typing import Dict
-
-import pandas as pd
-from lightwood.api.types import ModelAnalysis
-
-
-# Interface that must be respected by predictor objects generated from JSON ML and/or compatible with Mindsdb
-
[docs]class PredictorInterface: - """ - Abstraction of a Lightwood predictor. The ``PredictorInterface`` encompasses how Lightwood interacts with the full ML pipeline. Internally, - - The ``PredictorInterface`` class must have several expected functions: - - - ``analyze_data``: Peform a statistical analysis on the unprocessed data; this helps inform downstream encoders and mixers on how to treat the data types. - - ``preprocess``: Apply cleaning functions to each of the columns within the dataset to prepare them for featurization - - ``split``: Split the input dataset into a train/dev/test set according to your splitter function - - ``prepare``: Create and, if necessary, train your encoders to create feature representations from each column of your data. - - ``featurize``: For input, pre-processed data, create feature vectors - - ``fit``: Train your mixer models to yield predictions from featurized data - - ``analyze_ensemble``: Evaluate the quality of fit for your mixer models - - ``adjust``: Incorporate new data to update pre-existing model(s). - - For simplification, we offer an end-to-end approach that allows you to input raw data and follow every step of the process until you reach a trained predictor with the ``learn`` function: - - - ``learn``: An end-to-end technique specifying how to pre-process, featurize, and train the model(s) of interest. The expected input is raw, untrained data. No explicit output is provided, but the Predictor object will "host" the trained model thus. - - You can also use the predictor to now estimate new data: - - - ``predict``: Deploys the chosen best model, and evaluates the given data to provide target estimates. - - ``save``: Saves the Predictor object for further use. - - The ``PredictorInterface`` is created via J{ai}son's custom code creation. A problem inherits from this class with pre-populated routines to fill out expected results, given the nature of each problem type. - """ # noqa - - model_analysis: ModelAnalysis = None - - def __init__(self): - pass - -
[docs] def analyze_data(self, data: pd.DataFrame) -> None: - """ - Performs a statistical analysis on the data to identify distributions, imbalanced classes, and other nuances within the data. - - :param data: Data used in training the model(s). - """ # noqa - pass
- -
[docs] def preprocess(self, data: pd.DataFrame) -> pd.DataFrame: - """ - Cleans the unprocessed dataset provided. - - :param data: (Unprocessed) Data used in training the model(s). - :returns: The cleaned data frame - """ # noqa - pass
- -
[docs] def split(self, data: pd.DataFrame) -> Dict[str, pd.DataFrame]: - """ - Categorizes the data into a training/testing split; if data is a classification problem, will stratify the data. - - :param data: Pre-processed data, but generically any dataset to split into train/dev/test. - :returns: Dictionary containing training/testing fraction - """ # noqa - pass
- -
[docs] def prepare(self, data: Dict[str, pd.DataFrame]) -> None: - """ - Prepares the encoders for each column of data. - - :param data: Pre-processed data that has been split into train/test. Explicitly uses "train" and/or "dev" in preparation of encoders. - - :returns: Nothing; prepares the encoders for learned representations. - """ # noqa
- -
[docs] def featurize(self, split_data: Dict[str, pd.DataFrame]): - """ - Provides an encoded representation for each dataset in ``split_data``. Requires `self.encoders` to be prepared. - - :param split_data: Pre-processed data from the dataset, split into train/test (or any other keys relevant) - - :returns: For each dataset provided in ``split_data``, the encoded representations of the data. - """ # noqa - pass
- -
[docs] def fit(self, enc_data: Dict[str, pd.DataFrame]) -> None: - """ - Fits "mixer" models to train predictors on the featurized data. Instantiates a set of trained mixers and an ensemble of them. - - :param enc_data: Pre-processed and featurized data, split into the relevant train/test splits. Keys expected are "train", "dev", and "test" - """ # noqa - pass
- -
[docs] def analyze_ensemble(self, enc_data: Dict[str, pd.DataFrame]) -> None: - """ - Evaluate the quality of mixers within an ensemble of models. - - :param enc_data: Pre-processed and featurized data, split into the relevant train/test splits. - """ - pass
- -
[docs] def learn(self, data: pd.DataFrame) -> None: - """ - Trains the predictor's model(s) starting from raw data. Raw data is pre-processed and cleaned accordingly. As data is assigned a particular type (ex: numerical, categorical, etc.), the respective feature encoder will convert it into a representation usable for training ML models. All requested ML models are then compiled and fit on the training data. - - This step amalgamates ``preprocess`` -> ``featurize`` -> ``fit`` with the necessary splitting + analyze_data that occurs. - - :param data: (Unprocessed) Data used in training the model(s). - - :returns: Nothing; instantiates with best fit model from ensemble. - """ # noqa - pass
- -
[docs] def adjust(self, new_data: Dict[str, pd.DataFrame]) -> None: - """ - Adjusts a previously trained model on new data. Adopts the same process as ``learn``, except that ``adjust`` expects the best model to have already been trained. - - .. warning:: This is experimental and subject to change. - :param new_data: New data used to adjust a previously trained model. Keys must be "old" and "new", referencing the old and new datasets. In some situations, the old data is still required to train a model (e.g. regression) to ensure the new data doesn't entirely override it. - - :returns: Nothing; adjusts best-fit model - """ # noqa - pass
- -
[docs] def predict(self, data: pd.DataFrame, args: Dict[str, object] = {}) -> pd.DataFrame: - """ - Intakes raw data to provide predicted values for your trained model. - - :param data: Data (n_samples, n_columns) that the model(s) will evaluate on and provide the target prediction. - :param args: parameters needed to update the predictor ``PredictionArguments`` object, which holds any parameters relevant for prediction. - - :returns: A dataframe of predictions of the same length of input. - """ # noqa - pass
- -
[docs] def save(self, file_path: str) -> None: - """ - With a provided file path, saves the Predictor instance for later use. - - :param file_path: Location to store your Predictor Instance. - - :returns: Saves Predictor instance. - """ - with open(file_path, "wb") as fp: - dill.dump(self, fp)
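To show how these methods are meant to fit together, here is a rough sketch of the intended call sequence, assuming `predictor` is a concrete subclass generated from JSON-AI (the dataset and file names are hypothetical):

```
# Rough sketch; `predictor` is assumed to come from lightwood's code generation
# (e.g. predictor_from_code), and "my_dataset.csv" is a hypothetical file.
import dill
import pandas as pd

df = pd.read_csv("my_dataset.csv")

predictor.learn(df)                       # preprocess -> split -> prepare -> featurize -> fit
preds = predictor.predict(df.iloc[:10])   # dataframe of predictions, same length as the input
predictor.save("predictor.pkl")           # serialized with dill, see save() above

with open("predictor.pkl", "rb") as fp:   # loading mirrors save()
    predictor = dill.load(fp)
```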
\ No newline at end of file
diff --git a/docs/_modules/api/types.html b/docs/_modules/api/types.html
deleted file mode 100644
index 6fe6e3866..000000000
--- a/docs/_modules/api/types.html
+++ /dev/null
@@ -1,814 +0,0 @@
-api.types — lightwood 1.6.1 documentation

Source code for api.types

-# TODO: type hint the returns
-
-from typing import Dict, List, Optional, Union
-import sys
-
-if sys.version_info >= (3, 8):
-    from typing import TypedDict
-else:
-    from typing_extensions import TypedDict
-
-from dataclasses import dataclass
-from lightwood.helpers.log import log
-from dataclasses_json import dataclass_json
-from dataclasses_json.core import _asdict, Json
-import json
-
-
-# See: https://www.python.org/dev/peps/pep-0589/ for how this works
-# Not very intuitive but very powerful abstraction, might be useful in other places (@TODO)
-
[docs]class Module(TypedDict): - """ - Modules are the blocks of code that end up being called from the JSON AI, representing either object instantiations or function calls. - - :param module: Name of the module (function or class name) - :param args: Argument to pass to the function or constructor - """ # noqa - module: str - args: Dict[str, str]
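For illustration, a ``Module`` entry is just a plain dictionary; the splitter name and arguments below are hypothetical placeholders:

```
# Illustrative only: the module/args values are hypothetical placeholders.
custom_splitter: Module = {
    "module": "MyCustomSplitter.split",
    "args": {"data": "data", "pct_train": "0.8"},
}
```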
- - -
[docs]@dataclass -class Feature: - """ - Within a dataframe, each column is considered its own "feature" (unless ignored etc.). \ - The following expects each feature to have descriptions of the following: - - :param encoder: the methodology for encoding a feature (a Lightwood Encoder) - :param data_dtype: The type of information within this column (ex.: numerical, categorical, etc.) - :param dependency: Any custom attributes for this feature that may require non-standard processing. This highly\ - depends on the encoder (ex: Pretrained text may be fine-tuned on the target; time-series requires prior time-steps). - """ - - encoder: Module - data_dtype: str = None - dependency: List[str] = None - -
[docs] @staticmethod - def from_dict(obj: Dict): - """ - Create ``Feature`` objects from the a dictionary representation. - - :param obj: A dictionary representation of a column feature's attributes. Must include keys *encoder*, \ - *data_dtype*, and *dependency*. - - :Example: - - >>> my_dict = {"feature_A": {"encoder": MyEncoder, "data_dtype": "categorical", "dependency": None}} - >>> print(Feature.from_dict(my_dict["feature_A"])) - >>> Feature(encoder=None, data_dtype='categorical', dependency=None) - - :returns: A Feature object with loaded information. - """ - encoder = obj["encoder"] - data_dtype = obj.get("data_dtype", None) - dependency = obj.get("dependency", None) - - feature = Feature(encoder=encoder, data_dtype=data_dtype, dependency=dependency) - - return feature
- -
[docs] @staticmethod - def from_json(data: str): - """ - Create ``Feature`` objects from JSON representation. This method calls on :ref: `from_dict` after loading the \ - json config. - - :param data: A JSON representation of the feature. - - :returns: Loaded information into the Feature representation. - """ - return Feature.from_dict(json.loads(data))
- -
[docs] def to_dict(self, encode_json=False) -> Dict[str, Json]: - """ - Converts a Feature to a dictionary representation. - - :returns: A python dictionary with strings indicating the three key elements and their respective values of \ - the Feature class. - """ - as_dict = _asdict(self, encode_json=encode_json) - for k in list(as_dict.keys()): - if as_dict[k] is None: - del as_dict[k] - return as_dict
- -
[docs] def to_json(self) -> Dict[str, Json]: - """ - Converts a Feature into a JSON object. Calls ``to_dict`` under the hood. - - :returns: Json config syntax for the three key elements and their respective values of the Feature class. - """ - return json.dumps(self.to_dict(), indent=4)
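A small round-trip sketch based on the ``from_dict`` docstring above; "MyEncoder" stands in for a real encoder name:

```
# "MyEncoder" is a placeholder, not a real lightwood encoder name.
feat = Feature.from_dict({"encoder": "MyEncoder", "data_dtype": "categorical", "dependency": None})
print(feat)             # Feature(encoder='MyEncoder', data_dtype='categorical', dependency=None)
print(feat.to_json())   # None-valued fields are dropped by to_dict()
```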
- - -
[docs]@dataclass_json -@dataclass -class Output: - """ - A representation for the output feature. This is specifically used on the target column of your dataset. \ - Four attributes are expected as seen below. - - Note, currently supervised tasks are supported, hence categorical, numerical, and time-series are the expected \ - outputs types. Complex features such as text generation are not currently available by default. - - :param data_dtype: The type of information within the target column (ex.: numerical, categorical, etc.). - :param encoder: the methodology for encoding the target feature (a Lightwood Encoder). There can only be one \ - encoder for the output target. - :param mixers: The list of ML algorithms that are trained for the target distribution. - :param ensemble: For a panel of ML algorithms, the approach of selecting the best mixer, and the metrics used in \ - that evaluation. - """ - - data_dtype: str - encoder: str = None - mixers: List[str] = None - ensemble: str = None
- - -
[docs]@dataclass_json -@dataclass -class TypeInformation: - """ - For a dataset, provides information on column types, how they're used, and any other potential identifiers. - - TypeInformation is generated within ``data.infer_types``, where small samples of each column are evaluated in a custom framework to infer what data type each column holds. The user may override data types, but it is recommended to do so within a JSON-AI config file. - - :param dtypes: For each column's name, the associated data type inferred. - :param additional_info: Any possible sub-categories or additional descriptive information. - :param identifiers: Columns within the dataset highly suspected of being identifiers or IDs. These do not carry informative value and are therefore ignored in subsequent training/analysis procedures unless manually indicated. - """ # noqa - - dtypes: Dict[str, str] - additional_info: Dict[str, object] - identifiers: Dict[str, str] - - def __init__(self): - self.dtypes = dict() - self.additional_info = dict() - self.identifiers = dict()
- - -
[docs]@dataclass_json -@dataclass -class StatisticalAnalysis: - """ - The Statistical Analysis data class allows users to consider key descriptors of their data using simple \ - techniques such as histograms, mean and standard deviation, word count, missing values, and any detected bias\ - in the information. - - :param nr_rows: Number of rows (samples) in the dataset - :param df_target_stddev: The standard deviation of the target of the dataset - :param train_observed_classes: - :param target_class_distribution: - :param histograms: - :param buckets: - :param missing: - :param distinct: - :param bias: - :param avg_words_per_sentence: - :param positive_domain: - """ - - nr_rows: int - df_target_stddev: Optional[float] - train_observed_classes: object # Union[None, List[str]] - target_class_distribution: object # Dict[str, float] - histograms: object # Dict[str, Dict[str, List[object]]] - buckets: object # Dict[str, Dict[str, List[object]]] - missing: object - distinct: object - bias: object - avg_words_per_sentence: object - positive_domain: bool
- - -
[docs]@dataclass_json -@dataclass -class DataAnalysis: - """ - Data Analysis wraps :class: `.StatisticalAnalysis` and :class: `.TypeInformation` together. Further details can be seen in their respective documentation references. - """ # noqa - - statistical_analysis: StatisticalAnalysis - type_information: TypeInformation
- - -
[docs]@dataclass -class TimeseriesSettings: - """ - For time-series specific problems, more specific treatment of the data is necessary. The following attributes \ - enable time-series tasks to be carried out properly. - - :param is_timeseries: Whether the input data should be treated as time series; if true, this flag is checked in \ - subsequent internal steps to ensure processing is appropriate for time-series data. - :param order_by: A list of columns by which the data should be ordered. - :param group_by: Optional list of columns by which the data should be grouped. Each different combination of values\ - for these columns will yield a different series. - :param window: The temporal horizon (number of rows) that a model intakes to "look back" into when making a\ - prediction, after the rows are ordered by order_by columns and split into groups if applicable. - :param nr_predictions: The number of points in the future that predictions should be made for, defaults to 1. Once \ - trained, the model will be able to predict up to this many points into the future. - :param historical_columns: The temporal dynamics of these columns will be used as additional context to train the \ - time series predictor. Note that a non-historical column shall still be used to forecast, but without \ - considering their change through time. - :param target_type: Automatically inferred dtype of the target (e.g. `dtype.integer`, `dtype.float`). - :param use_previous_target: Use the previous values of the target column to generate predictions. Defaults to True. - """ - - is_timeseries: bool - order_by: List[str] = None - window: int = None - group_by: List[str] = None - use_previous_target: bool = True - nr_predictions: int = None - historical_columns: List[str] = None - target_type: str = ( - "" # @TODO: is the current setter (outside of initialization) a sane option? - # @TODO: George: No, I don't think it is, we need to pass this some other way - ) - allow_incomplete_history: bool = False - -
[docs] @staticmethod - def from_dict(obj: Dict): - """ - Creates a TimeseriesSettings object from python dictionary specifications. - - :param: obj: A python dictionary with the necessary representation for time-series. The only mandatory columns are ``order_by`` and ``window``. - - :returns: A populated ``TimeseriesSettings`` object. - """ # noqa - if len(obj) > 0: - for mandatory_setting in ["order_by", "window"]: - if mandatory_setting not in obj: - err = f"Missing mandatory timeseries setting: {mandatory_setting}" - log.error(err) - raise Exception(err) - - timeseries_settings = TimeseriesSettings( - is_timeseries=True, - order_by=obj["order_by"], - window=obj["window"], - use_previous_target=obj.get("use_previous_target", True), - historical_columns=[], - nr_predictions=obj.get("nr_predictions", 1), - allow_incomplete_history=obj.get('allow_incomplete_history', False) - ) - for setting in obj: - timeseries_settings.__setattr__(setting, obj[setting]) - - else: - timeseries_settings = TimeseriesSettings(is_timeseries=False) - - return timeseries_settings
- -
[docs] @staticmethod - def from_json(data: str): - """ - Creates a TimeseriesSettings object from JSON specifications via python dictionary. - - :param: data: JSON-config file with necessary Time-series specifications - - :returns: A populated ``TimeseriesSettings`` object. - """ - return TimeseriesSettings.from_dict(json.loads(data))
- -
[docs] def to_dict(self, encode_json=False) -> Dict[str, Json]: - """ - Creates a dictionary from ``TimeseriesSettings`` object - - :returns: A python dictionary containing the ``TimeSeriesSettings`` specifications. - """ - return _asdict(self, encode_json=encode_json)
- -
[docs] def to_json(self) -> Dict[str, Json]: - """ - Creates JSON config from TimeseriesSettings object - :returns: The JSON config syntax containing the ``TimeSeriesSettings`` specifications. - """ - return json.dumps(self.to_dict())
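A short sketch of building the settings from a dictionary; ``order_by`` and ``window`` are the mandatory keys, and the column names used here are hypothetical:

```
# "saledate" and "Type" are hypothetical column names.
tss = TimeseriesSettings.from_dict({
    "order_by": ["saledate"],
    "window": 8,
    "group_by": ["Type"],
    "nr_predictions": 4,
})
print(tss.is_timeseries)   # True
print(tss.to_json())
```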
- - -
[docs]@dataclass -class ProblemDefinition: - """ - The ``ProblemDefinition`` object indicates details on how the models that predict the target are prepared. \ - The only required specification from a user is the ``target``, which indicates the column within the input \ - data that the user is trying to predict. Within the ``ProblemDefinition``, the user can specify aspects \ - about how long the feature-engineering preparation may take, and nuances about training the models. - - :param target: The name of the target column; this is the column that will be used as the goal of the prediction. - :param pct_invalid: Number of data points maximally tolerated as invalid/missing/unknown. \ - If the data cleaning process exceeds this number, no subsequent steps will be taken. - :param unbias_target: all classes are automatically weighted inverse to how often they occur - :param seconds_per_mixer: Number of seconds maximum to spend PER mixer trained in the list of possible mixers. - :param seconds_per_encoder: Number of seconds maximum to spend when training an encoder that requires data to \ - learn a representation. - :param time_aim: Time budget (in seconds) to train all needed components for the predictive tasks, including \ - encoders and models. - :param target_weights: indicates to the accuracy functions how much to weight every target class. - :param positive_domain: For numerical taks, force predictor output to be positive (integer or float). - :param timeseries_settings: TimeseriesSettings object for time-series tasks, refer to its documentation for \ - available settings. - :param anomaly_detection: Whether to conduct unsupervised anomaly detection; currently supported only for time-\ - series. - :param ignore_features: The names of the columns the user wishes to ignore in the ML pipeline. Any column name \ - found in this list will be automatically removed from subsequent steps in the ML pipeline. - :param fit_on_all: Whether to fit the model on the held-out validation data. Validation data is strictly \ - used to evaluate how well a model is doing and is NEVER trained. However, in cases where users anticipate new \ - incoming data over time, the user may train the model further using the entire dataset. - :param strict_mode: crash if an `unstable` block (mixer, encoder, etc.) fails to run. - :param seed_nr: custom seed to use when generating a predictor from this problem definition. - """ - - target: str - pct_invalid: float - unbias_target: bool - seconds_per_mixer: Union[int, None] - seconds_per_encoder: Union[int, None] - time_aim: Union[int, None] - target_weights: Union[List[float], None] - positive_domain: bool - timeseries_settings: TimeseriesSettings - anomaly_detection: bool - ignore_features: List[str] - fit_on_all: bool - strict_mode: bool - seed_nr: int - -
[docs] @staticmethod - def from_dict(obj: Dict): - """ - Creates a ProblemDefinition object from a python dictionary with necessary specifications. - - :param obj: A python dictionary with the necessary features for the ``ProblemDefinition`` class. - Only requires ``target`` to be specified. - - :returns: A populated ``ProblemDefinition`` object. - """ - target = obj['target'] - pct_invalid = obj.get('pct_invalid', 2) - unbias_target = obj.get('unbias_target', True) - seconds_per_mixer = obj.get('seconds_per_mixer', None) - seconds_per_encoder = obj.get('seconds_per_encoder', None) - time_aim = obj.get('time_aim', None) - target_weights = obj.get('target_weights', None) - positive_domain = obj.get('positive_domain', False) - timeseries_settings = TimeseriesSettings.from_dict(obj.get('timeseries_settings', {})) - anomaly_detection = obj.get('anomaly_detection', True) - ignore_features = obj.get('ignore_features', []) - fit_on_all = obj.get('fit_on_all', True) - strict_mode = obj.get('strict_mode', True) - seed_nr = obj.get('seed_nr', 420) - problem_definition = ProblemDefinition( - target=target, - pct_invalid=pct_invalid, - unbias_target=unbias_target, - seconds_per_mixer=seconds_per_mixer, - seconds_per_encoder=seconds_per_encoder, - time_aim=time_aim, - target_weights=target_weights, - positive_domain=positive_domain, - timeseries_settings=timeseries_settings, - anomaly_detection=anomaly_detection, - ignore_features=ignore_features, - fit_on_all=fit_on_all, - strict_mode=strict_mode, - seed_nr=seed_nr - ) - - return problem_definition
- -
[docs] @staticmethod - def from_json(data: str): - """ - Creates a ProblemDefinition Object from JSON config file. - - :param data: - - :returns: A populated ProblemDefinition object. - """ - return ProblemDefinition.from_dict(json.loads(data))
- -
[docs] def to_dict(self, encode_json=False) -> Dict[str, Json]: - """ - Creates a python dictionary from the ProblemDefinition object - - :returns: A python dictionary - """ - return _asdict(self, encode_json=encode_json)
- -
[docs] def to_json(self) -> Dict[str, Json]: - """ - Creates a JSON config from the ProblemDefinition object - - :returns: TODO - """ - return json.dumps(self.to_dict())
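A short sketch showing that only ``target`` is required, with the remaining fields falling back to the defaults in ``from_dict``; the column name is hypothetical:

```
# "rental_price" is a hypothetical target column name.
pdef = ProblemDefinition.from_dict({
    "target": "rental_price",
    "time_aim": 300,               # optional overall training budget, in seconds
})
print(pdef.seconds_per_mixer)      # None unless explicitly set
print(pdef.to_json())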
- - -
[docs]@dataclass -class JsonAI: - """ - The JsonAI Class allows users to construct flexible JSON config to specify their ML pipeline. JSON-AI follows a \ - recipe of how to pre-process data, construct features, and train on the target column. To do so, the following \ - specifications are required internally. - - :param features: The corresponding``Feature`` object for each of the column names of the dataset - :param outputs: The column name of the target and its ``Output`` object - :param problem_definition: The ``ProblemDefinition`` criteria. - :param identifiers: A dictionary of column names and respective data types that are likely identifiers/IDs within the data. Through the default cleaning process, these are ignored. - :param cleaner: The Cleaner object represents the pre-processing step on a dataframe. The user can specify custom subroutines, if they choose, on how to handle preprocessing. Alternatively, "None" suggests Lightwood's default approach in ``data.cleaner``. - :param splitter: The Splitter object is the method in which the input data is split into training/validation/testing data. - :param analyzer: The Analyzer object is used to evaluate how well a model performed on the predictive task. - :param explainer: The Explainer object deploys explainability tools of interest on a model to indicate how well a model generalizes its predictions. - :param analysis_blocks: The blocks that get used in both analysis and inference inside the analyzer and explainer blocks. - :param timeseries_transformer: Procedure used to transform any timeseries task dataframe into the format that lightwood expects for the rest of the pipeline. - :param timeseries_analyzer: Procedure that extracts key insights from any timeseries in the data (e.g. measurement frequency, target distribution, etc). - :param accuracy_functions: A list of performance metrics used to evaluate the best mixers. - """ # noqa - - features: Dict[str, Feature] - outputs: Dict[str, Output] - problem_definition: ProblemDefinition - identifiers: Dict[str, str] - cleaner: Optional[Module] = None - splitter: Optional[Module] = None - analyzer: Optional[Module] = None - explainer: Optional[Module] = None - analysis_blocks: Optional[List[Module]] = None - timeseries_transformer: Optional[Module] = None - timeseries_analyzer: Optional[Module] = None - accuracy_functions: Optional[List[str]] = None - -
[docs] @staticmethod - def from_dict(obj: Dict): - """ - Creates a JSON-AI object from dictionary specifications of the JSON-config. - """ - features = {k: Feature.from_dict(v) for k, v in obj["features"].items()} - outputs = {k: Output.from_dict(v) for k, v in obj["outputs"].items()} - problem_definition = ProblemDefinition.from_dict(obj["problem_definition"]) - identifiers = obj["identifiers"] - cleaner = obj.get("cleaner", None) - splitter = obj.get("splitter", None) - analyzer = obj.get("analyzer", None) - explainer = obj.get("explainer", None) - analysis_blocks = obj.get("analysis_blocks", None) - timeseries_transformer = obj.get("timeseries_transformer", None) - timeseries_analyzer = obj.get("timeseries_analyzer", None) - accuracy_functions = obj.get("accuracy_functions", None) - - json_ai = JsonAI( - features=features, - outputs=outputs, - problem_definition=problem_definition, - identifiers=identifiers, - cleaner=cleaner, - splitter=splitter, - analyzer=analyzer, - explainer=explainer, - analysis_blocks=analysis_blocks, - timeseries_transformer=timeseries_transformer, - timeseries_analyzer=timeseries_analyzer, - accuracy_functions=accuracy_functions, - ) - - return json_ai
- -
[docs] @staticmethod - def from_json(data: str): - """ Creates a JSON-AI object from JSON config""" - return JsonAI.from_dict(json.loads(data))
- -
[docs] def to_dict(self, encode_json=False) -> Dict[str, Json]: - """ - Creates a python dictionary with necessary modules within the ML pipeline specified from the JSON-AI object. - - :returns: A python dictionary that has the necessary components of the ML pipeline for a given dataset. - """ - as_dict = _asdict(self, encode_json=encode_json) - for k in list(as_dict.keys()): - if k == "features": - feature_dict = {} - for name in self.features: - feature_dict[name] = self.features[name].to_dict() - as_dict[k] = feature_dict - if as_dict[k] is None: - del as_dict[k] - return as_dict
- -
[docs] def to_json(self) -> Dict[str, Json]: - """ - Creates JSON config to represent the necessary modules within the ML pipeline specified from the JSON-AI object. - - :returns: A JSON config that has the necessary components of the ML pipeline for a given dataset. - """ - return json.dumps(self.to_dict(), indent=4)
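A serialization sketch; the construction below is purely illustrative, since real JSON-AI configs are normally generated by lightwood from a dataframe plus a ``ProblemDefinition``:

```
# Minimal, purely illustrative construction; "rental_price" is a hypothetical target.
pdef = ProblemDefinition.from_dict({"target": "rental_price"})
json_ai = JsonAI(features={}, outputs={}, problem_definition=pdef, identifiers={})
print(json_ai.to_json())   # None-valued optional fields (cleaner, splitter, ...) are omitted
```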
- - -
[docs]@dataclass_json -@dataclass -class ModelAnalysis: - """ - The ``ModelAnalysis`` class stores useful information to describe a model and understand its predictive performance on a validation dataset. - For each trained ML algorithm, we store: - - :param accuracies: Dictionary with obtained values for each accuracy function (specified in JsonAI) - :param accuracy_histogram: Dictionary with histograms of reported accuracy by target value. - :param accuracy_samples: Dictionary with sampled pairs of observed target values and respective predictions. - :param train_sample_size: Size of the training set (data that parameters are updated on) - :param test_sample_size: Size of the testing set (explicitly held out) - :param column_importances: Dictionary with the importance of each column for the model, as estimated by an approach that closely follows a leave-one-covariate-out strategy. - :param confusion_matrix: A confusion matrix for the validation dataset. - :param histograms: Histogram for each dataset feature. - :param dtypes: Inferred data types for each dataset feature. - - """ # noqa - - accuracies: Dict[str, float] - accuracy_histogram: Dict[str, list] - accuracy_samples: Dict[str, list] - train_sample_size: int - test_sample_size: int - column_importances: Dict[str, float] - confusion_matrix: object - histograms: object - dtypes: object
- - -
[docs]@dataclass -class PredictionArguments: - """ - This class contains all possible arguments that can be passed to a Lightwood predictor at inference time. - On each predict call, all arguments included in a parameter dictionary will update the respective fields - in the `PredictionArguments` instance that the predictor will have. - - :param predict_proba: triggers (where supported) predictions in raw probability output form. I.e. for classifiers, - instead of returning only the predicted class, the output additionally includes the assigned probability for - each class. - :param all_mixers: forces an ensemble to return predictions emitted by all its internal mixers. - :param fixed_confidence: For the analyzer module, specifies a fixed `alpha` confidence for the model calibration so \ - that predictions, on average, are correct `alpha` percent of the time. - :param anomaly_error_rate: Error rate for unsupervised anomaly detection. Bounded between 0.01 and 0.99 \ - (respectively implies wider and tighter bounds, all other parameters being equal). - :param anomaly_cooldown: Sets the minimum amount of timesteps between consecutive firings of the anomaly \ - detector. - """ # noqa - - predict_proba: bool = False - all_mixers: bool = False - fixed_confidence: Union[int, float, None] = None - anomaly_error_rate: Union[float, None] = None - anomaly_cooldown: int = 1 - -
[docs] @staticmethod - def from_dict(obj: Dict): - """ - Creates a ``PredictionArguments`` object from a python dictionary with necessary specifications. - - :param obj: A python dictionary with the necessary features for the ``PredictionArguments`` class. - - :returns: A populated ``PredictionArguments`` object. - """ - - # maybe this should be stateful instead, and save the latest used value for each field? - predict_proba = obj.get('predict_proba', PredictionArguments.predict_proba) - all_mixers = obj.get('all_mixers', PredictionArguments.all_mixers) - fixed_confidence = obj.get('fixed_confidence', PredictionArguments.fixed_confidence) - anomaly_error_rate = obj.get('anomaly_error_rate', PredictionArguments.anomaly_error_rate) - anomaly_cooldown = obj.get('anomaly_cooldown', PredictionArguments.anomaly_cooldown) - - pred_args = PredictionArguments( - predict_proba=predict_proba, - all_mixers=all_mixers, - fixed_confidence=fixed_confidence, - anomaly_error_rate=anomaly_error_rate, - anomaly_cooldown=anomaly_cooldown, - ) - - return pred_args
- -
[docs] def to_dict(self, encode_json=False) -> Dict[str, Json]: - """ - Creates a python dictionary from the ``PredictionArguments`` object - - :returns: A python dictionary - """ - return _asdict(self, encode_json=encode_json)
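A small sketch of constructing prediction-time arguments; unspecified fields keep their class defaults:

```
args = PredictionArguments.from_dict({"predict_proba": True, "fixed_confidence": 0.9})
print(args.all_mixers)   # False (class default)
print(args.to_dict())
# In practice the raw dict is usually passed straight to a predictor,
# e.g. predictor.predict(df, args={"predict_proba": True})
```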
\ No newline at end of file
diff --git a/docs/_modules/data/cleaner.html b/docs/_modules/data/cleaner.html
deleted file mode 100644
index 169525802..000000000
--- a/docs/_modules/data/cleaner.html
+++ /dev/null
@@ -1,538 +0,0 @@
-data.cleaner — lightwood 1.6.1 documentation

Source code for data.cleaner

-import re
-from copy import deepcopy
-
-import pandas as pd
-import datetime
-from dateutil.parser import parse as parse_dt
-
-from lightwood.api.dtype import dtype
-from lightwood.helpers import text
-from lightwood.helpers.log import log
-from lightwood.api.types import TimeseriesSettings
-from lightwood.helpers.numeric import is_nan_numeric
-
-from typing import Dict, List, Optional, Tuple, Callable, Union
-
-
-
[docs]def cleaner( - data: pd.DataFrame, - dtype_dict: Dict[str, str], - pct_invalid: float, - identifiers: Dict[str, str], - target: str, - mode: str, - timeseries_settings: TimeseriesSettings, - anomaly_detection: bool, - custom_cleaning_functions: Dict[str, str] = {} -) -> pd.DataFrame: - """ - The cleaner is a function which takes in the raw data, plus additional information about its types and about the problem. Based on this it generates a "clean" representation of the data, where each column has an ideal standardized type and all malformed or otherwise missing or invalid elements are turned into ``None``. - - :param data: The raw data - :param dtype_dict: Type information for each column - :param pct_invalid: How much of each column can be invalid - :param identifiers: A dict containing all identifier typed columns - :param target: The target column - :param mode: Can be "predict" or "train" - :param timeseries_settings: Timeseries related settings, only relevant for timeseries predictors, otherwise can be the default object - :param anomaly_detection: Are we detecting anomalies with this predictor? - - :returns: The cleaned data - """ # noqa - - data = _remove_columns(data, identifiers, target, mode, timeseries_settings, - anomaly_detection, dtype_dict) - - for col in _get_columns_to_clean(data, dtype_dict, mode, target): - - # Get and apply a cleaning function for each data type - # If you want to customize the cleaner, it is likely enough to modify ``get_cleaning_func`` - data[col] = data[col].apply(get_cleaning_func(dtype_dict[col], custom_cleaning_functions)) - - return data
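A toy sketch of calling the cleaner directly on a small non-timeseries frame; in normal use the generated predictor code wires this up, and the import path below is an assumption:

```
# Toy example; the `lightwood.data.cleaner` import path is assumed.
import pandas as pd
from lightwood.api.dtype import dtype
from lightwood.api.types import TimeseriesSettings
from lightwood.data.cleaner import cleaner

df = pd.DataFrame({"age": ["21", "not a number", "35"], "label": ["a", "b", None]})
clean = cleaner(
    data=df,
    dtype_dict={"age": dtype.integer, "label": dtype.categorical},
    pct_invalid=100,
    identifiers={},
    target="label",
    mode="train",
    timeseries_settings=TimeseriesSettings(is_timeseries=False),
    anomaly_detection=False,
)
print(clean)   # "not a number" becomes None; the row with a missing target is dropped
```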
- - -def _check_if_invalid(new_data: pd.Series, pct_invalid: float, col_name: str): - """ - Checks how many invalid data points there are. Invalid data points are flagged as "Nones" from the cleaning process (see data/cleaner.py for default). - If there are too many invalid data points (specified by `pct_invalid`), an error is raised. This is used as a safeguard for very messy data. - - :param new_data: data to check for invalid values. - :param pct_invalid: maximum percentage of invalid values. If this threshold is surpassed, an exception is raised. - :param col_name: name of the column to analyze. - - """ # noqa - - chk_invalid = ( - 100 - * (len(new_data) - len([x for x in new_data if x is not None])) - / len(new_data) - ) - - if chk_invalid > pct_invalid: - err = f'Too many ({chk_invalid}%) invalid values in column {col_name}' - log.error(err) - raise Exception(err) - - -
[docs]def get_cleaning_func(data_dtype: dtype, custom_cleaning_functions: Dict[str, str]) -> Callable: - """ - For the provided data type, provide the appropriate cleaning function. Below are the defaults, users can either override this function OR impose a custom block. - - :param data_dtype: The data-type (inferred from a column) as prescribed from ``api.dtype`` - - :returns: The appropriate function that will pre-process (clean) data of specified dtype. - """ # noqa - if data_dtype in custom_cleaning_functions: - clean_func = eval(custom_cleaning_functions[data_dtype]) - - elif data_dtype in (dtype.date, dtype.datetime): - clean_func = _standardize_datetime - - elif data_dtype in (dtype.float, dtype.tsarray): - clean_func = _clean_float - - elif data_dtype in (dtype.integer): - clean_func = _clean_int - - elif data_dtype in (dtype.array): - clean_func = _standardize_array - - elif data_dtype in (dtype.tags): - clean_func = _tags_to_tuples - - elif data_dtype in (dtype.quantity): - clean_func = _clean_quantity - - elif data_dtype in ( - dtype.short_text, - dtype.rich_text, - dtype.categorical, - dtype.binary, - dtype.audio, - dtype.image, - dtype.video - ): - clean_func = _clean_text - - else: - raise ValueError(f"{data_dtype} is not supported. Check lightwood.api.dtype") - - return clean_func
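A tiny sketch of grabbing a default cleaning function and applying it, mirroring the ``data[col].apply(...)`` call inside ``cleaner`` above (assumes ``dtype`` and ``get_cleaning_func`` from this module are in scope):

```
clean_float = get_cleaning_func(dtype.float, custom_cleaning_functions={})
print(clean_float("42"))     # -> 42.0
print(clean_float("oops"))   # unparseable or NaN values become None
```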
- - -# ------------------------- # -# Temporal Cleaning -# ------------------------- # - - -def _standardize_datetime(element: object) -> Optional[float]: - """ - Parses an expected date-time element. Intakes an element that can in theory be anything. - """ - try: - date = parse_dt(str(element)) - except Exception: - try: - date = datetime.datetime.utcfromtimestamp(element) - except Exception: - return None - - return date.timestamp() - - -# ------------------------- # -# Tags/Sequences -# ------------------------- # - -# TODO Make it split on something other than commas -def _tags_to_tuples(tags_str: str) -> Tuple[str]: - """ - Converts comma-separated values into a tuple to preserve a sequence/array. - - Ex: - >> x = 'apples, oranges, bananas' - >> _tags_to_tuples(x) - >> ('apples', 'oranges', 'bananas') - """ - try: - return tuple([x.strip() for x in tags_str.split(",")]) - except Exception: - return tuple() - - -def _standardize_array(element: object) -> Optional[Union[List[float], float]]: - """ - Given an array of numbers in the form ``[1, 2, 3, 4]``, converts into a numerical sequence. - - :param element: An array-like element in a sequence - :returns: standardized array OR scalar number IF edge case - - Ex of edge case: - >> element = [1] - >> _standardize_array(element) - >> 1 - """ - try: - element = str(element) - element = element.rstrip("]").lstrip("[") - element = element.rstrip(" ").lstrip(" ") - element = element.replace(", ", " ").replace(",", " ") - # Handles cases where arrays are numbers - if " " not in element: - element = _clean_float(element) - else: - element = [float(x) for x in element.split(" ")] - except Exception: - pass - - return element - - -# ------------------------- # -# Integers/Floats/Quantities -# ------------------------- # - -def _clean_float(element: object) -> Optional[float]: - """ - Given an element, converts it into float numeric format. If element is NaN, or inf, then returns None. - """ - try: - cleaned_float = text.clean_float(element) - if is_nan_numeric(cleaned_float): - return None - return cleaned_float - except Exception: - return None - - -def _clean_int(element: object) -> Optional[int]: - """ - Given an element, converts it into integer numeric format. If element is NaN, or inf, then returns None. - """ - element = _clean_float(element) - if element is not None: - element = int(element) - return element - - -def _clean_quantity(element: object) -> Optional[float]: - """ - Given a quantity, clean and convert it into float numeric format. If element is NaN, or inf, then returns None. - """ - element = float(re.sub("[^0-9.,]", "", str(element)).replace(",", ".")) - return _clean_float(element) - - -# ------------------------- # -# Text -# ------------------------- # -def _clean_text(element: object) -> str: - return str(element) - - -# ------------------------- # -# Other helpers -# ------------------------- # -def _rm_rows_w_empty_targets(df: pd.DataFrame, target: str) -> pd.DataFrame: - """ - Drop any rows that have targets as unknown. Targets are necessary to train. 
- - :param df: The input dataframe including the target value - :param target: the column name that is the output target variable - - :returns: Data with any target smissing - """ - # Compare length before/after - len_before = len(df) - - # Use Pandas ```dropna``` to omit any rows with missing values for targets; these cannot be trained - df = df.dropna(subset=[target]) - - # Compare length with after - len_after = len(df) - nr_removed = len_before - len_after - - if nr_removed != 0: - log.warning( - f"Removed {nr_removed} rows because target was missing. Training on these rows is not possible." - ) # noqa - - return df - - -def _remove_columns(data: pd.DataFrame, identifiers: Dict[str, object], target: str, - mode: str, timeseries_settings: TimeseriesSettings, anomaly_detection: bool, - dtype_dict: Dict[str, dtype]) -> pd.DataFrame: - """ - Drop columns we don't want to use in order to train or predict - - :param data: The raw data - :param dtype_dict: Type information for each column - :param identifiers: A dict containing all identifier typed columns - :param target: The target columns - :param mode: Can be "predict" or "train" - :param timeseries_settings: Timeseries related settings, only relevant for timeseries predictors, otherwise can be the default object - :param anomaly_detection: Are we detecting anomalies with this predictor? - - :returns: A (new) dataframe without the dropped columns - """ # noqa - data = deepcopy(data) - to_drop = [*[x for x in identifiers.keys() if x != target], - *[x for x in data.columns if x in dtype_dict and dtype_dict[x] == dtype.invalid]] - exceptions = ["__mdb_make_predictions"] - to_drop = [x for x in to_drop if x in data.columns] - data = data.drop(columns=to_drop) - - if mode == "train": - data = _rm_rows_w_empty_targets(data, target) - if mode == "predict": - if ( - target in data.columns - and not timeseries_settings.use_previous_target - and not anomaly_detection - ): - data = data.drop(columns=[target]) - - # Drop extra columns - for name in list(data.columns): - if name not in dtype_dict and name not in exceptions: - data = data.drop(columns=[name]) - - return data - - -def _get_columns_to_clean(data: pd.DataFrame, dtype_dict: Dict[str, dtype], mode: str, target: str) -> List[str]: - """ - :param data: The raw data - :param dtype_dict: Type information for each column - :param target: The target columns - :param mode: Can be "predict" or "train" - - :returns: A list of columns that we want to clean - """ # noqa - - cleanable_columns = [] - for name, _ in dtype_dict.items(): - if mode == "predict": - if name == target: - continue - if name in data.columns: - cleanable_columns.append(name) - return cleanable_columns -
\ No newline at end of file
diff --git a/docs/_modules/index.html b/docs/_modules/index.html
deleted file mode 100644
index 811540e75..000000000
--- a/docs/_modules/index.html
+++ /dev/null
@@ -1,259 +0,0 @@
-Overview: module code — lightwood 1.6.1 documentation
\ No newline at end of file
diff --git a/docs/_modules/lightwood/analysis/analyze.html b/docs/_modules/lightwood/analysis/analyze.html
deleted file mode 100644
index 65168f112..000000000
--- a/docs/_modules/lightwood/analysis/analyze.html
+++ /dev/null
@@ -1,314 +0,0 @@
-lightwood.analysis.analyze — lightwood 1.6.1 documentation

Source code for lightwood.analysis.analyze

-from typing import Dict, List, Tuple, Optional
-
-from lightwood.helpers.log import log
-from lightwood.api import dtype
-from lightwood.ensemble import BaseEnsemble
-from lightwood.analysis.base import BaseAnalysisBlock
-from lightwood.data.encoded_ds import EncodedDs
-from lightwood.encoder.text.pretrained import PretrainedLangEncoder
-from lightwood.api.types import ModelAnalysis, StatisticalAnalysis, TimeseriesSettings, PredictionArguments
-
-
-
[docs]def model_analyzer( - predictor: BaseEnsemble, - data: EncodedDs, - train_data: EncodedDs, - stats_info: StatisticalAnalysis, - target: str, - ts_cfg: TimeseriesSettings, - dtype_dict: Dict[str, str], - accuracy_functions, - analysis_blocks: Optional[List[BaseAnalysisBlock]] = [] -) -> Tuple[ModelAnalysis, Dict[str, object]]: - """ - Analyses model on a validation subset to evaluate accuracy, estimate feature importance and generate a - calibration model to estimating confidence in future predictions. - - Additionally, any user-specified analysis blocks (see class `BaseAnalysisBlock`) are also called here. - - :return: - runtime_analyzer: This dictionary object gets populated in a sequential fashion with data generated from - any `.analyze()` block call. This dictionary object is stored in the predictor itself, and used when - calling the `.explain()` method of all analysis blocks when generating predictions. - - model_analysis: `ModelAnalysis` object that contains core analysis metrics, not necessarily needed when predicting. - """ - - runtime_analyzer = {} - data_type = dtype_dict[target] - - # retrieve encoded data representations - encoded_train_data = train_data - encoded_val_data = data - data = encoded_val_data.data_frame - input_cols = list([col for col in data.columns if col != target]) - - # predictive task - is_numerical = data_type in (dtype.integer, dtype.float, dtype.array, dtype.tsarray, dtype.quantity) - is_classification = data_type in (dtype.categorical, dtype.binary) - is_multi_ts = ts_cfg.is_timeseries and ts_cfg.nr_predictions > 1 - has_pretrained_text_enc = any([isinstance(enc, PretrainedLangEncoder) - for enc in encoded_train_data.encoders.values()]) - - # raw predictions for validation dataset - args = {} if not is_classification else {"predict_proba": True} - normal_predictions = predictor(encoded_val_data, args=PredictionArguments.from_dict(args)) - normal_predictions = normal_predictions.set_index(data.index) - - # ------------------------- # - # Run analysis blocks, both core and user-defined - # ------------------------- # - kwargs = { - 'predictor': predictor, - 'target': target, - 'input_cols': input_cols, - 'dtype_dict': dtype_dict, - 'normal_predictions': normal_predictions, - 'data': data, - 'train_data': train_data, - 'encoded_val_data': encoded_val_data, - 'is_classification': is_classification, - 'is_numerical': is_numerical, - 'is_multi_ts': is_multi_ts, - 'stats_info': stats_info, - 'ts_cfg': ts_cfg, - 'accuracy_functions': accuracy_functions, - 'has_pretrained_text_enc': has_pretrained_text_enc - } - - for block in analysis_blocks: - log.info("The block %s is now running its analyze() method", block.__class__.__name__) - runtime_analyzer = block.analyze(runtime_analyzer, **kwargs) - - # ------------------------- # - # Populate ModelAnalysis object - # ------------------------- # - model_analysis = ModelAnalysis( - accuracies=runtime_analyzer.get('score_dict', {}), - accuracy_histogram=runtime_analyzer.get('acc_histogram', {}), - accuracy_samples=runtime_analyzer.get('acc_samples', {}), - train_sample_size=len(encoded_train_data), - test_sample_size=len(encoded_val_data), - confusion_matrix=runtime_analyzer['cm'], - column_importances=runtime_analyzer.get('column_importances', {}), - histograms=stats_info.histograms, - dtypes=dtype_dict - ) - - return model_analysis, runtime_analyzer
\ No newline at end of file
diff --git a/docs/_modules/lightwood/analysis/base.html b/docs/_modules/lightwood/analysis/base.html
deleted file mode 100644
index df163faf2..000000000
--- a/docs/_modules/lightwood/analysis/base.html
+++ /dev/null
@@ -1,262 +0,0 @@
-lightwood.analysis.base — lightwood 1.6.1 documentation

Source code for lightwood.analysis.base

-from typing import Tuple, Dict, Optional
-
-import pandas as pd
-from lightwood.helpers.log import log
-
-
-
[docs]class BaseAnalysisBlock: - """Class to be inherited by any analysis/explainer block.""" - def __init__(self, - deps: Optional[Tuple] = () - ): - - self.dependencies = deps # can be parallelized when there are no dependencies @TODO enforce - -
[docs] def analyze(self, info: Dict[str, object], **kwargs) -> Dict[str, object]: - """ - This method should be called once during the analysis phase, or not called at all. - It computes any information that the block may either output to the model analysis object, - or use at inference time when `.explain()` is called (in this case, make sure all needed - objects are added to the runtime analyzer so that `.explain()` can access them). - - :param info: Dictionary where any new information or objects are added. The next analysis block will use - the output of the previous block as a starting point. - :param kwargs: Dictionary with named variables from either the core analysis or the rest of the prediction - pipeline. - """ - log.info(f"{self.__class__.__name__}.analyze() has not been implemented, no modifications will be done to the model analysis.") # noqa - return info
- -
[docs] def explain(self, - row_insights: pd.DataFrame, - global_insights: Dict[str, object], **kwargs) -> Tuple[pd.DataFrame, Dict[str, object]]: - """ - This method should be called once during the explaining phase at inference time, or not called at all. - Additional explanations can be at an instance level (row-wise) or global. - For the former, return a data frame with any new insights. For the latter, a dictionary is required. - - :param row_insights: dataframe with previously computed row-level explanations. - :param global_insights: dict() with any explanations that concern all predicted instances or the model itself. - - :returns: - - row_insights: modified input dataframe with any new row insights added here. - - global_insights: dict() with any explanations that concern all predicted instances or the model itself. - """ - log.info(f"{self.__class__.__name__}.explain() has not been implemented, no modifications will be done to the data insights.") # noqa - return row_insights, global_insights
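As a sketch, a user-defined block would subclass `BaseAnalysisBlock` and override these two hooks; the statistic computed below is purely illustrative and assumes a numerical target:

```
# Illustrative custom analysis block; the stored statistic is arbitrary.
from typing import Dict, Tuple
import pandas as pd
from lightwood.analysis.base import BaseAnalysisBlock


class PredMeanBlock(BaseAnalysisBlock):
    def analyze(self, info: Dict[str, object], **kwargs) -> Dict[str, object]:
        # store something computed from the validation predictions
        info['pred_mean'] = kwargs['normal_predictions']['prediction'].mean()
        return info

    def explain(self, row_insights: pd.DataFrame,
                global_insights: Dict[str, object], **kwargs) -> Tuple[pd.DataFrame, Dict[str, object]]:
        # surface the stored statistic as a global insight at inference time
        global_insights['pred_mean'] = kwargs['analysis']['pred_mean']
        return row_insights, global_insights
```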
\ No newline at end of file
diff --git a/docs/_modules/lightwood/analysis/explain.html b/docs/_modules/lightwood/analysis/explain.html
deleted file mode 100644
index 33e74a5b1..000000000
--- a/docs/_modules/lightwood/analysis/explain.html
+++ /dev/null
@@ -1,308 +0,0 @@
-lightwood.analysis.explain — lightwood 1.6.1 documentation

Source code for lightwood.analysis.explain

-from typing import Optional, List, Dict
-import torch
-import pandas as pd
-
-from lightwood.helpers.log import log
-from lightwood.api.types import TimeseriesSettings
-from lightwood.helpers.ts import get_inferred_timestamps
-from lightwood.analysis.base import BaseAnalysisBlock
-
-
-
[docs]def explain(data: pd.DataFrame, - encoded_data: torch.Tensor, - predictions: pd.DataFrame, - timeseries_settings: TimeseriesSettings, - analysis: Dict, - target_name: str, - target_dtype: str, - - positive_domain: bool, # @TODO: pass inside a {} with params for each block to avoid signature overload - fixed_confidence: float, - anomaly_detection: bool, - - # forces specific confidence level in ICP - anomaly_error_rate: float, - - # ignores anomaly detection for N steps after an - # initial anomaly triggers the cooldown period; - # implicitly assumes series are regularly spaced - anomaly_cooldown: int, - - explainer_blocks: Optional[List[BaseAnalysisBlock]] = [], - ts_analysis: Optional[Dict] = {} - ): - """ - This procedure runs at the end of every normal `.predict()` call. Its goal is to generate prediction insights, - potentially using information generated at the model analysis stage (e.g. confidence estimation). - - As in `analysis()`, any user-specified analysis blocks (see class `BaseAnalysisBlock`) are also called here. - - :return: - row_insights: a DataFrame containing predictions and all generated insights at a row-level. - """ - - # ------------------------- # - # Setup base insights - # ------------------------- # - data = data.reset_index(drop=True) - - row_insights = pd.DataFrame() - global_insights = {} - row_insights['prediction'] = predictions['prediction'] - - if target_name in data.columns: - row_insights['truth'] = data[target_name] - else: - row_insights['truth'] = [None] * len(predictions['prediction']) - - if timeseries_settings.is_timeseries: - if timeseries_settings.group_by: - for col in timeseries_settings.group_by: - row_insights[f'group_{col}'] = data[col] - - for col in timeseries_settings.order_by: - row_insights[f'order_{col}'] = data[col] - - for col in timeseries_settings.order_by: - row_insights[f'order_{col}'] = get_inferred_timestamps( - row_insights, col, ts_analysis['deltas'], timeseries_settings) - - kwargs = { - 'data': data, - 'encoded_data': encoded_data, - 'predictions': predictions, - 'analysis': analysis, - 'target_name': target_name, - 'target_dtype': target_dtype, - 'tss': timeseries_settings, - 'positive_domain': positive_domain, - 'fixed_confidence': fixed_confidence, - 'anomaly_detection': anomaly_detection, - 'anomaly_error_rate': anomaly_error_rate, - 'anomaly_cooldown': anomaly_cooldown - } - - # ------------------------- # - # Call explanation blocks - # ------------------------- # - for block in explainer_blocks: - log.info("The block %s is now running its explain() method", block.__class__.__name__) - row_insights, global_insights = block.explain(row_insights, global_insights, **kwargs) - - return row_insights, global_insights
\ No newline at end of file
diff --git a/docs/_modules/lightwood/analysis/helpers/acc_stats.html b/docs/_modules/lightwood/analysis/helpers/acc_stats.html
deleted file mode 100644
index 676a39c1b..000000000
--- a/docs/_modules/lightwood/analysis/helpers/acc_stats.html
+++ /dev/null
@@ -1,402 +0,0 @@
-lightwood.analysis.helpers.acc_stats — lightwood 1.6.1 documentation

Source code for lightwood.analysis.helpers.acc_stats

-import random
-from types import SimpleNamespace
-from typing import Dict, Optional
-
-import numpy as np
-from sklearn.metrics import confusion_matrix
-
-from lightwood.api.dtype import dtype
-from lightwood.analysis.base import BaseAnalysisBlock
-from lightwood.helpers.general import evaluate_accuracy
-
-
-
[docs]class AccStats(BaseAnalysisBlock): - """ Computes accuracy stats and a confusion matrix for the validation dataset """ - - def __init__(self, deps=('ICP',)): - super().__init__(deps=deps) # @TODO: enforce that this actually prevents early execution somehow - -
[docs] def analyze(self, info: Dict[str, object], **kwargs) -> Dict[str, object]: - ns = SimpleNamespace(**kwargs) - - # @TODO: maybe pass ts_analysis to trigger group-wise MASE instead of R2 mean, though it wouldn't be 0-1 bounded - info['score_dict'] = evaluate_accuracy(ns.data, ns.normal_predictions['prediction'], - ns.target, ns.accuracy_functions) - info['normal_accuracy'] = np.mean(list(info['score_dict'].values())) - - self.fit(ns, info['result_df']) - info['val_overall_acc'], info['acc_histogram'], info['cm'], info['acc_samples'] = self.get_accuracy_stats() - return info
- - def fit(self, ns: SimpleNamespace, conf=Optional[np.ndarray]): - self.col_stats = ns.dtype_dict - self.target = ns.target - self.input_cols = list(ns.dtype_dict.keys()) - self.buckets = ns.stats_info.buckets if ns.stats_info.buckets else {} - - self.normal_predictions_bucketized = [] - self.real_values_bucketized = [] - self.numerical_samples_arr = [] - - column_indexes = {} - for i, col in enumerate(self.input_cols): - column_indexes[col] = i - - real_present_inputs_arr = [] - for _, row in ns.data.iterrows(): - present_inputs = [1] * len(self.input_cols) - for i, col in enumerate(self.input_cols): - if str(row[col]) in ('None', 'nan', '', 'Nan', 'NAN', 'NaN'): - present_inputs[i] = 0 - real_present_inputs_arr.append(present_inputs) - - for n in range(len(ns.normal_predictions)): - row = ns.data.iloc[n] - real_value = row[self.target] - predicted_value = ns.normal_predictions.iloc[n]['prediction'] - - if isinstance(predicted_value, list): - # T+N time series, for now we compare the T+1 prediction only @TODO: generalize - predicted_value = predicted_value[0] - - predicted_value = predicted_value \ - if self.col_stats[self.target] not in [dtype.integer, dtype.float, dtype.quantity] \ - else float(predicted_value) - - real_value = real_value \ - if self.col_stats[self.target] not in [dtype.integer, dtype.float, dtype.quantity] \ - else float(real_value) - - if self.buckets: - bucket = self.buckets[self.target] - predicted_value_b = get_value_bucket(predicted_value, bucket, self.col_stats[self.target]) - real_value_b = get_value_bucket(real_value, bucket, self.col_stats[self.target]) - else: - predicted_value_b = predicted_value - real_value_b = real_value - - if conf is not None and self.col_stats[self.target] in [dtype.integer, dtype.float, dtype.quantity]: - predicted_range = conf.iloc[n][['lower', 'upper']].tolist() - else: - predicted_range = (predicted_value_b, predicted_value_b) - - self.real_values_bucketized.append(real_value_b) - self.normal_predictions_bucketized.append(predicted_value_b) - if conf is not None and self.col_stats[self.target] in [dtype.integer, dtype.float, dtype.quantity]: - self.numerical_samples_arr.append((real_value, predicted_range)) - - def get_accuracy_stats(self, is_classification=None, is_numerical=None): - bucket_accuracy = {} - bucket_acc_counts = {} - for i, bucket in enumerate(self.normal_predictions_bucketized): - if bucket not in bucket_acc_counts: - bucket_acc_counts[bucket] = [] - - if len(self.numerical_samples_arr) != 0: - bucket_acc_counts[bucket].append(self.numerical_samples_arr[i][1][0] < - self.numerical_samples_arr[i][0] < self.numerical_samples_arr[i][1][1]) # noqa - else: - bucket_acc_counts[bucket].append(1 if bucket == self.real_values_bucketized[i] else 0) - - for bucket in bucket_acc_counts: - bucket_accuracy[bucket] = sum(bucket_acc_counts[bucket]) / len(bucket_acc_counts[bucket]) - - accuracy_count = [] - for counts in list(bucket_acc_counts.values()): - accuracy_count += counts - - overall_accuracy = sum(accuracy_count) / len(accuracy_count) - - for bucket in range(len(self.buckets)): - if bucket not in bucket_accuracy: - if bucket in self.real_values_bucketized: - # If it was never predicted, but it did exist as a real value, then assume 0% confidence when it does get predicted # noqa - bucket_accuracy[bucket] = 0 - - for bucket in range(len(self.buckets)): - if bucket not in bucket_accuracy: - # If it wasn't seen either in the real values or in the predicted values, assume average confidence (maybe should be 0 instead ?) 
# noqa - bucket_accuracy[bucket] = overall_accuracy - - accuracy_histogram = { - 'buckets': list(bucket_accuracy.keys()), - 'accuracies': list(bucket_accuracy.values()), - 'is_classification': is_classification, - 'is_numerical': is_numerical - } - - labels = list(set([*self.real_values_bucketized, *self.normal_predictions_bucketized])) - matrix = confusion_matrix(self.real_values_bucketized, self.normal_predictions_bucketized, labels=labels) - matrix = [[int(y) if str(y) != 'nan' else 0 for y in x] for x in matrix] - - target_bucket = self.buckets[self.target] - bucket_values = [target_bucket[i] if i < len(target_bucket) else None for i in labels] - - cm = { - 'matrix': matrix, - 'predicted': bucket_values, - 'real': bucket_values - } - - accuracy_samples = None - if len(self.numerical_samples_arr) > 0: - nr_samples = min(400, len(self.numerical_samples_arr)) - sampled_numerical_samples_arr = random.sample(self.numerical_samples_arr, nr_samples) - accuracy_samples = { - 'y': [x[0] for x in sampled_numerical_samples_arr], - 'x': [x[1] for x in sampled_numerical_samples_arr] - } - - return overall_accuracy, accuracy_histogram, cm, accuracy_samples
- - -def get_value_bucket(value, buckets, target_dtype): - """ - :return: The bucket in the `histogram` in which our `value` falls - """ - if buckets is None: - return None - - if target_dtype in (dtype.binary, dtype.categorical): - if value in buckets: - bucket = buckets.index(value) - else: - bucket = len(buckets) # for null values - - elif target_dtype in (dtype.integer, dtype.float, dtype.quantity): - bucket = closest(buckets, value) - else: - bucket = len(buckets) # for null values - - return bucket - - -def closest(arr, value): - """ - :return: The index of the member of `arr` which is closest to `value` - """ - if value is None: - return -1 - - for i, ele in enumerate(arr): - value = float(str(value).replace(',', '.')) - if ele > value: - return i - 1 - - return len(arr) - 1 -
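For numerical targets, the accuracy bookkeeping above reduces to checking, row by row, whether the real value falls inside the predicted `[lower, upper]` interval. A minimal standalone sketch of that computation, with made-up numbers rather than Lightwood's actual data structures:

```python
# Hypothetical per-row samples: (real_value, (lower_bound, upper_bound)),
# mirroring the entries stored in `numerical_samples_arr` above.
numerical_samples = [
    (3.2, (2.5, 4.0)),   # real value inside the interval -> hit
    (7.9, (5.0, 7.0)),   # outside -> miss
    (1.1, (0.0, 2.0)),   # hit
]

hits = [lower < real < upper for real, (lower, upper) in numerical_samples]
overall_accuracy = sum(hits) / len(hits)  # 2/3 for this toy data
print(overall_accuracy)
```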
\ No newline at end of file
diff --git a/docs/_modules/lightwood/analysis/helpers/feature_importance.html b/docs/_modules/lightwood/analysis/helpers/feature_importance.html
deleted file mode 100644
index 743e8869c..000000000
--- a/docs/_modules/lightwood/analysis/helpers/feature_importance.html
+++ /dev/null
@@ -1,287 +0,0 @@
- lightwood.analysis.helpers.feature_importance — lightwood 1.6.1 documentation
Source code for lightwood.analysis.helpers.feature_importance

-from copy import deepcopy
-from types import SimpleNamespace
-from typing import Dict
-
-import torch
-import numpy as np
-
-from lightwood.analysis.base import BaseAnalysisBlock
-from lightwood.helpers.general import evaluate_accuracy
-from lightwood.analysis.nc.util import t_softmax
-from lightwood.api.types import PredictionArguments
-
-
-
[docs]class GlobalFeatureImportance(BaseAnalysisBlock): - """ - Analysis block that estimates column importance with a variant of the LOCO (leave-one-covariate-out) algorithm. - - Roughly speaking, the procedure: - - iterates over all input columns - - if the input column is optional, generates predictions with its values set to None - - compares this accuracy with the accuracy obtained using all data - - all accuracy differences are passed through a softmax and reported as estimated column importance scores - - Note that, crucially, this method does not refit the predictor at any point. - - Reference: - https://compstat-lmu.github.io/iml_methods_limitations/pfi.html - """ - def __init__(self, disable_column_importance): - super().__init__() - self.disable_column_importance = disable_column_importance - -
[docs] def analyze(self, info: Dict[str, object], **kwargs) -> Dict[str, object]: - ns = SimpleNamespace(**kwargs) - - if self.disable_column_importance or ns.ts_cfg.is_timeseries or ns.has_pretrained_text_enc: - info['column_importances'] = None - else: - empty_input_accuracy = {} - ignorable_input_cols = [x for x in ns.input_cols if (not ns.ts_cfg.is_timeseries or - (x not in ns.ts_cfg.order_by and - x not in ns.ts_cfg.historical_columns))] - for col in ignorable_input_cols: - partial_data = deepcopy(ns.encoded_val_data) - partial_data.clear_cache() - partial_data.data_frame[col] = [None] * len(partial_data.data_frame[col]) - - args = {'predict_proba': True} if ns.is_classification else {} - empty_input_preds = ns.predictor(partial_data, args=PredictionArguments.from_dict(args)) - - empty_input_accuracy[col] = np.mean(list(evaluate_accuracy( - ns.data, - empty_input_preds['prediction'], - ns.target, - ns.accuracy_functions - ).values())) - - column_importances = {} - acc_increases = [] - for col in ignorable_input_cols: - accuracy_increase = (info['normal_accuracy'] - empty_input_accuracy[col]) - acc_increases.append(accuracy_increase) - - # low 0.2 temperature to accentuate differences - acc_increases = t_softmax(torch.Tensor([acc_increases]), t=0.2).tolist()[0] - for col, inc in zip(ignorable_input_cols, acc_increases): - column_importances[col] = inc # scores go from 0 to 1 - - info['column_importances'] = column_importances - - return info
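The leave-one-covariate-out recipe described above can be sketched outside of the block itself. The accuracy numbers and column names below are invented for illustration; only `t_softmax` is the real helper used in the code above, called with the same arguments:

```python
import torch
from lightwood.analysis.nc.util import t_softmax

# Hypothetical accuracies: full input vs. one column blanked out at a time.
normal_accuracy = 0.90
empty_input_accuracy = {'age': 0.70, 'city': 0.88, 'income': 0.60}

# The accuracy drop when a column is removed is its raw importance signal ...
acc_increases = [normal_accuracy - empty_input_accuracy[col] for col in empty_input_accuracy]

# ... and a low-temperature softmax accentuates differences, mapping scores into (0, 1).
scores = t_softmax(torch.Tensor([acc_increases]), t=0.2).tolist()[0]
column_importances = dict(zip(empty_input_accuracy, scores))
print(column_importances)  # 'income' receives the largest share
```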
\ No newline at end of file
diff --git a/docs/_modules/lightwood/analysis/nc/calibrate.html b/docs/_modules/lightwood/analysis/nc/calibrate.html
deleted file mode 100644
index 0225f0603..000000000
--- a/docs/_modules/lightwood/analysis/nc/calibrate.html
+++ /dev/null
@@ -1,598 +0,0 @@
- lightwood.analysis.nc.calibrate — lightwood 1.6.1 documentation
Source code for lightwood.analysis.nc.calibrate

-from copy import deepcopy
-from itertools import product
-from typing import Dict, Tuple
-from types import SimpleNamespace
-
-import numpy as np
-import pandas as pd
-from sklearn.preprocessing import OneHotEncoder
-
-from lightwood.api.dtype import dtype
-from lightwood.helpers.ts import add_tn_conf_bounds
-
-from lightwood.analysis.base import BaseAnalysisBlock
-from lightwood.analysis.nc.norm import Normalizer
-from lightwood.analysis.nc.icp import IcpRegressor, IcpClassifier
-from lightwood.analysis.nc.base import CachedRegressorAdapter, CachedClassifierAdapter
-from lightwood.analysis.nc.nc import BoostedAbsErrorErrFunc, RegressorNc, ClassifierNc, MarginErrFunc
-from lightwood.analysis.nc.util import clean_df, set_conf_range, get_numeric_conf_range, \
-    get_categorical_conf, get_anomalies
-
-
-
[docs]class ICP(BaseAnalysisBlock): - """ Confidence estimation block, uses inductive conformal predictors (ICPs) for model agnosticity """ - - def __init__(self, - fixed_significance: float, - positive_domain: bool, - confidence_normalizer: bool - ): - super().__init__() - self.fixed_significance = fixed_significance - self.positive_domain = positive_domain - self.confidence_normalizer = confidence_normalizer - -
[docs] def analyze(self, info: Dict[str, object], **kwargs) -> Dict[str, object]: - ns = SimpleNamespace(**kwargs) - - data_type = ns.dtype_dict[ns.target] - output = {'icp': {'__mdb_active': False}} - - fit_params = {'nr_preds': ns.ts_cfg.nr_predictions or 0, 'columns_to_ignore': []} - fit_params['columns_to_ignore'].extend([f'timestep_{i}' for i in range(1, fit_params['nr_preds'])]) - - if ns.is_classification: - if ns.predictor.supports_proba: - all_cat_cols = [col for col in ns.normal_predictions.columns if '__mdb_proba' in col] - all_classes = np.array([col.replace('__mdb_proba_', '') for col in all_cat_cols]) - else: - class_keys = sorted(ns.encoded_val_data.encoders[ns.target].rev_map.keys()) - all_classes = np.array([ns.encoded_val_data.encoders[ns.target].rev_map[idx] for idx in class_keys]) - - if data_type != dtype.tags: - enc = OneHotEncoder(sparse=False, handle_unknown='ignore') - enc.fit(all_classes.reshape(-1, 1)) - output['label_encoders'] = enc # needed to repr cat labels inside nonconformist - else: - output['label_encoders'] = None - - adapter = CachedClassifierAdapter - nc_function = MarginErrFunc() - nc_class = ClassifierNc - icp_class = IcpClassifier - - else: - adapter = CachedRegressorAdapter - nc_function = BoostedAbsErrorErrFunc() - nc_class = RegressorNc - icp_class = IcpRegressor - - result_df = pd.DataFrame() - - if ns.is_numerical or (ns.is_classification and data_type != dtype.tags): - model = adapter(ns.predictor) - - norm_params = {'target': ns.target, 'dtype_dict': ns.dtype_dict, 'predictor': ns.predictor, - 'encoders': ns.encoded_val_data.encoders, 'is_multi_ts': ns.is_multi_ts, 'stop_after': 1e2} - if self.confidence_normalizer: - normalizer = Normalizer(fit_params=norm_params) - normalizer.fit(ns.train_data) - normalizer.prediction_cache = normalizer(ns.encoded_val_data) - else: - normalizer = None - - # instance the ICP - nc = nc_class(model, nc_function, normalizer=normalizer) - icp = icp_class(nc) - - output['icp']['__default'] = icp - - # setup prediction cache to avoid additional .predict() calls - if ns.is_classification: - if ns.predictor.supports_proba: - icp.nc_function.model.prediction_cache = ns.normal_predictions[all_cat_cols].values - else: - predicted_classes = pd.get_dummies( - ns.normal_predictions['prediction']).values # inflate to one-hot enc - icp.nc_function.model.prediction_cache = predicted_classes - - elif ns.is_multi_ts: - # we fit ICPs for time series confidence bounds only at t+1 forecast - icp.nc_function.model.prediction_cache = np.array([p[0] for p in ns.normal_predictions['prediction']]) - else: - icp.nc_function.model.prediction_cache = np.array(ns.normal_predictions['prediction']) - - if not ns.is_classification: - output['df_target_stddev'] = {'__default': ns.stats_info.df_target_stddev} - - # fit additional ICPs in time series tasks with grouped columns - if ns.ts_cfg.is_timeseries and ns.ts_cfg.group_by: - - # create an ICP for each possible group - group_info = ns.data[ns.ts_cfg.group_by].to_dict('list') - all_group_combinations = list(product(*[set(x) for x in group_info.values()])) - output['icp']['__mdb_groups'] = all_group_combinations - output['icp']['__mdb_group_keys'] = [x for x in group_info.keys()] - - for combination in all_group_combinations: - output['icp'][frozenset(combination)] = deepcopy(icp) - - # calibrate ICP - icp_df = deepcopy(ns.data) - icp_df, y = clean_df(icp_df, ns.target, ns.is_classification, output.get('label_encoders', None)) - output['icp']['__default'].index = icp_df.columns - 
output['icp']['__default'].calibrate(icp_df.values, y) - - # get confidence estimation for validation dataset - conf, ranges = set_conf_range( - icp_df, icp, ns.dtype_dict[ns.target], - output, positive_domain=self.positive_domain, significance=self.fixed_significance) - if not ns.is_classification: - result_df = pd.DataFrame(index=ns.data.index, columns=['confidence', 'lower', 'upper'], dtype=float) - result_df.loc[icp_df.index, 'lower'] = ranges[:, 0] - result_df.loc[icp_df.index, 'upper'] = ranges[:, 1] - else: - result_df = pd.DataFrame(index=ns.data.index, columns=['confidence'], dtype=float) - - result_df.loc[icp_df.index, 'confidence'] = conf - - # calibrate additional grouped ICPs - if ns.ts_cfg.is_timeseries and ns.ts_cfg.group_by: - icps = output['icp'] - group_keys = icps['__mdb_group_keys'] - - # add all predictions to DF - icps_df = deepcopy(ns.data) - if ns.is_multi_ts: - icps_df[f'__predicted_{ns.target}'] = [p[0] for p in ns.normal_predictions['prediction']] - else: - icps_df[f'__predicted_{ns.target}'] = ns.normal_predictions['prediction'] - - for group in icps['__mdb_groups']: - icp_df = icps_df - if icps[frozenset(group)].nc_function.normalizer is not None: - icp_df[f'__norm_{ns.target}'] = icps[frozenset(group)].nc_function.normalizer.prediction_cache - - # filter irrelevant rows for each group combination - for key, val in zip(group_keys, group): - icp_df = icp_df[icp_df[key] == val] - - # save relevant predictions in the caches, then calibrate the ICP - pred_cache = icp_df.pop(f'__predicted_{ns.target}').values - icps[frozenset(group)].nc_function.model.prediction_cache = pred_cache - icp_df, y = clean_df(icp_df, ns.target, ns.is_classification, output.get('label_encoders', None)) - if icps[frozenset(group)].nc_function.normalizer is not None: - icps[frozenset(group)].nc_function.normalizer.prediction_cache = icp_df.pop( - f'__norm_{ns.target}').values - - icps[frozenset(group)].index = icp_df.columns # important at inference time - icps[frozenset(group)].calibrate(icp_df.values, y) - - # save training std() for bounds width selection - if not ns.is_classification: - icp_train_df = ns.data - for key, val in zip(group_keys, group): - icp_train_df = icp_train_df[icp_train_df[key] == val] - y_train = icp_train_df[ns.target].values - output['df_target_stddev'][frozenset(group)] = y_train.std() - - # get bounds for relevant rows in validation dataset - conf, group_ranges = set_conf_range( - icp_df, icps[frozenset(group)], - ns.dtype_dict[ns.target], - output, group=frozenset(group), - positive_domain=self.positive_domain, significance=self.fixed_significance) - # save group bounds - if not ns.is_classification: - result_df.loc[icp_df.index, 'lower'] = group_ranges[:, 0] - result_df.loc[icp_df.index, 'upper'] = group_ranges[:, 1] - - result_df.loc[icp_df.index, 'confidence'] = conf - - # consolidate all groups here - output['icp']['__mdb_active'] = True - - output['result_df'] = result_df - - info = {**info, **output} - return info
- -
[docs] def explain(self, row_insights: pd.DataFrame, global_insights: Dict[str, object], - **kwargs) -> Tuple[pd.DataFrame, Dict[str, object]]: - ns = SimpleNamespace(**kwargs) - - if ns.analysis['icp']['__mdb_active']: - icp_X = deepcopy(ns.data) - - # replace observed data w/predictions - preds = ns.predictions['prediction'] - if ns.tss.is_timeseries and ns.tss.nr_predictions > 1: - preds = [p[0] for p in preds] - - for col in [f'timestep_{i}' for i in range(1, ns.tss.nr_predictions)]: - if col in icp_X.columns: - icp_X.pop(col) # erase ignorable columns - - icp_X[ns.target_name] = preds - - is_categorical = ns.target_dtype in (dtype.binary, dtype.categorical, dtype.array) - is_numerical = ns.target_dtype in [dtype.integer, dtype.float, - dtype.quantity] or ns.target_dtype in (dtype.array, dtype.tsarray) - is_anomaly_task = is_numerical and ns.tss.is_timeseries and ns.anomaly_detection - - if (is_numerical or is_categorical) and ns.analysis['icp'].get('__mdb_active', False): - - # reorder DF index - index = ns.analysis['icp']['__default'].index.values - index = np.append(index, ns.target_name) if ns.target_name not in index else index - icp_X = icp_X.reindex(columns=index) # important, else bounds can be invalid - - # only one normalizer, even if it's a grouped time series task - normalizer = ns.analysis['icp']['__default'].nc_function.normalizer - if normalizer: - normalizer.prediction_cache = normalizer(ns.encoded_data) - icp_X['__mdb_selfaware_scores'] = normalizer.prediction_cache - - # get ICP predictions - result_cols = ['lower', 'upper', 'significance'] if is_numerical else ['significance'] - result = pd.DataFrame(index=icp_X.index, columns=result_cols) - - # base ICP - X = deepcopy(icp_X) - # Calling `values` multiple times increased runtime of this function; referenced var is faster - icp_values = X.values - - # get all possible ranges - if ns.tss.is_timeseries and ns.tss.nr_predictions > 1 and is_numerical: - - # bounds in time series are only given for the first forecast - ns.analysis['icp']['__default'].nc_function.model.prediction_cache = \ - [p[0] for p in ns.predictions['prediction']] - all_confs = ns.analysis['icp']['__default'].predict(icp_values) - - elif is_numerical: - ns.analysis['icp']['__default'].nc_function.model.prediction_cache = ns.predictions['prediction'] - all_confs = ns.analysis['icp']['__default'].predict(icp_values) - - # categorical - else: - predicted_proba = True if any(['__mdb_proba' in col for col in ns.predictions.columns]) else False - if predicted_proba: - all_cat_cols = [col for col in ns.predictions.columns if '__mdb_proba' in col] - class_dists = ns.predictions[all_cat_cols].values - for icol, cat_col in enumerate(all_cat_cols): - row_insights.loc[X.index, cat_col] = class_dists[:, icol] - else: - class_dists = pd.get_dummies(ns.predictions['prediction']).values - - ns.analysis['icp']['__default'].nc_function.model.prediction_cache = class_dists - - conf_candidates = list(range(20)) + list(range(20, 100, 10)) - all_ranges = np.array( - [ns.analysis['icp']['__default'].predict(icp_values, significance=s / 100) - for s in conf_candidates]) - all_confs = np.swapaxes(np.swapaxes(all_ranges, 0, 2), 0, 1) - - # convert (B, 2, 99) into (B, 2) given width or error rate constraints - if is_numerical: - significances = ns.fixed_confidence - if significances is not None: - confs = all_confs[:, :, int(100 * (1 - significances)) - 1] - else: - error_rate = ns.anomaly_error_rate if is_anomaly_task else None - significances, confs = 
get_numeric_conf_range(all_confs, - df_target_stddev=ns.analysis['df_target_stddev'], - positive_domain=self.positive_domain, - error_rate=error_rate) - result.loc[X.index, 'lower'] = confs[:, 0] - result.loc[X.index, 'upper'] = confs[:, 1] - else: - conf_candidates = list(range(20)) + list(range(20, 100, 10)) - significances = get_categorical_conf(all_confs, conf_candidates) - - result.loc[X.index, 'significance'] = significances - - # grouped time series, we replace bounds in rows that have a trained ICP - if ns.analysis['icp'].get('__mdb_groups', False): - icps = ns.analysis['icp'] - group_keys = icps['__mdb_group_keys'] - - for group in icps['__mdb_groups']: - icp = icps[frozenset(group)] - - # check ICP has calibration scores - if icp.cal_scores[0].shape[0] > 0: - - # filter rows by group - X = deepcopy(icp_X) - for key, val in zip(group_keys, group): - X = X[X[key] == val] - - if X.size > 0: - # set ICP caches - icp.nc_function.model.prediction_cache = X.pop(ns.target_name).values - if icp.nc_function.normalizer: - icp.nc_function.normalizer.prediction_cache = X.pop('__mdb_selfaware_scores').values - - # predict and get confidence level given width or error rate constraints - if is_numerical: - all_confs = icp.predict(X.values) - error_rate = ns.anomaly_error_rate if is_anomaly_task else None - significances, confs = get_numeric_conf_range( - all_confs, - df_target_stddev=ns.analysis['df_target_stddev'], - positive_domain=self.positive_domain, - group=frozenset(group), - error_rate=error_rate - ) - - # only replace where grouped ICP is more informative (i.e. tighter) - if ns.fixed_confidence is None: - default_widths = result.loc[X.index, 'upper'] - result.loc[X.index, 'lower'] - grouped_widths = np.subtract(confs[:, 1], confs[:, 0]) - insert_index = (default_widths > grouped_widths)[lambda x: x.isin([True])].index - conf_index = (default_widths.reset_index(drop=True) > - grouped_widths)[lambda x: x.isin([True])].index - - result.loc[insert_index, 'lower'] = confs[conf_index, 0] - result.loc[insert_index, 'upper'] = confs[conf_index, 1] - result.loc[insert_index, 'significance'] = significances[conf_index] - - else: - conf_candidates = list(range(20)) + list(range(20, 100, 10)) - all_ranges = np.array( - [icp.predict(X.values, significance=s / 100) - for s in conf_candidates]) - all_confs = np.swapaxes(np.swapaxes(all_ranges, 0, 2), 0, 1) - significances = get_categorical_conf(all_confs, conf_candidates) - result.loc[X.index, 'significance'] = significances - - row_insights['confidence'] = result['significance'].astype(float).tolist() - - if is_numerical: - row_insights['lower'] = result['lower'].astype(float) - row_insights['upper'] = result['upper'].astype(float) - - # anomaly detection - if is_anomaly_task: - anomalies = get_anomalies(row_insights, - ns.data[ns.target_name], - cooldown=ns.anomaly_cooldown) - row_insights['anomaly'] = anomalies - - if ns.tss.is_timeseries and ns.tss.nr_predictions > 1 and is_numerical: - row_insights = add_tn_conf_bounds(row_insights, ns.tss) - - # Make sure the target and real values are of an appropriate type - if ns.tss.is_timeseries and ns.tss.nr_predictions > 1: - # Array output that are not of type <array> originally are odd and I'm not sure how to handle them - # Or if they even need handling yet - pass - elif ns.target_dtype in (dtype.integer): - row_insights['prediction'] = row_insights['prediction'].clip(-pow(2, 62), pow(2, 62)).astype(int) - row_insights['upper'] = row_insights['upper'].clip(-pow(2, 62), pow(2, 62)).astype(int) - 
row_insights['lower'] = row_insights['lower'].clip(-pow(2, 62), pow(2, 62)).astype(int) - elif ns.target_dtype in (dtype.float, dtype.quantity): - row_insights['prediction'] = row_insights['prediction'].astype(float) - row_insights['upper'] = row_insights['upper'].astype(float) - row_insights['lower'] = row_insights['lower'].astype(float) - elif ns.target_dtype in (dtype.short_text, dtype.rich_text, dtype.binary, dtype.categorical): - row_insights['prediction'] = row_insights['prediction'].astype(str) - - return row_insights, global_insights
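The block above drives nonconformist's ICP machinery; the underlying split-conformal idea for the numerical case can be shown in a few self-contained lines. This is a conceptual sketch with made-up data, not Lightwood's API: nonconformity scores are absolute errors on a held-out calibration set, and their `(1 - significance)` quantile defines a symmetric prediction interval for new rows.

```python
import numpy as np

rng = np.random.default_rng(0)
y_cal = rng.normal(size=200)                          # calibration targets (made up)
y_cal_hat = y_cal + rng.normal(scale=0.3, size=200)   # hypothetical model predictions

nonconformity = np.abs(y_cal - y_cal_hat)             # absolute-error nonconformity
significance = 0.1                                    # aim for ~90% coverage
q = np.quantile(nonconformity, 1 - significance)

y_new_hat = 0.42                                      # prediction for a new row
lower, upper = y_new_hat - q, y_new_hat + q
print(f"~90% interval: [{lower:.3f}, {upper:.3f}]")
```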
\ No newline at end of file
diff --git a/docs/_modules/lightwood/data/cleaner.html b/docs/_modules/lightwood/data/cleaner.html
deleted file mode 100644
index 8c507641c..000000000
--- a/docs/_modules/lightwood/data/cleaner.html
+++ /dev/null
@@ -1,538 +0,0 @@
- lightwood.data.cleaner — lightwood 1.6.1 documentation
Source code for lightwood.data.cleaner

-import re
-from copy import deepcopy
-
-import pandas as pd
-import datetime
-from dateutil.parser import parse as parse_dt
-
-from lightwood.api.dtype import dtype
-from lightwood.helpers import text
-from lightwood.helpers.log import log
-from lightwood.api.types import TimeseriesSettings
-from lightwood.helpers.numeric import is_nan_numeric
-
-from typing import Dict, List, Optional, Tuple, Callable, Union
-
-
-
[docs]def cleaner( - data: pd.DataFrame, - dtype_dict: Dict[str, str], - pct_invalid: float, - identifiers: Dict[str, str], - target: str, - mode: str, - timeseries_settings: TimeseriesSettings, - anomaly_detection: bool, - custom_cleaning_functions: Dict[str, str] = {} -) -> pd.DataFrame: - """ - The cleaner is a function which takes in the raw data, plus additional information about its types and about the problem. Based on this it generates a "clean" representation of the data, where each column has an ideal standardized type and all malformed or otherwise missing or invalid elements are turned into ``None`` - - :param data: The raw data - :param dtype_dict: Type information for each column - :param pct_invalid: How much of each column can be invalid - :param identifiers: A dict containing all identifier typed columns - :param target: The target column - :param mode: Can be "predict" or "train" - :param timeseries_settings: Timeseries related settings, only relevant for timeseries predictors, otherwise can be the default object - :param anomaly_detection: Are we detecting anomalies with this predictor? - - :returns: The cleaned data - """ # noqa - - data = _remove_columns(data, identifiers, target, mode, timeseries_settings, - anomaly_detection, dtype_dict) - - for col in _get_columns_to_clean(data, dtype_dict, mode, target): - - # Get and apply a cleaning function for each data type - # If you want to customize the cleaner, you will likely want to modify ``get_cleaning_func`` - data[col] = data[col].apply(get_cleaning_func(dtype_dict[col], custom_cleaning_functions)) - - return data
- - -def _check_if_invalid(new_data: pd.Series, pct_invalid: float, col_name: str): - """ - Checks how many invalid data points there are. Invalid data points are flagged as "Nones" from the cleaning processs (see data/cleaner.py for default). - If there are too many invalid data points (specified by `pct_invalid`), then an error message will pop up. This is used as a safeguard for very messy data. - - :param new_data: data to check for invalid values. - :param pct_invalid: maximum percentage of invalid values. If this threshold is surpassed, an exception is raised. - :param col_name: name of the column to analyze. - - """ # noqa - - chk_invalid = ( - 100 - * (len(new_data) - len([x for x in new_data if x is not None])) - / len(new_data) - ) - - if chk_invalid > pct_invalid: - err = f'Too many ({chk_invalid}%) invalid values in column {col_name}nam' - log.error(err) - raise Exception(err) - - -def get_cleaning_func(data_dtype: dtype, custom_cleaning_functions: Dict[str, str]) -> Callable: - """ - For the provided data type, provide the appropriate cleaning function. Below are the defaults, users can either override this function OR impose a custom block. - - :param data_dtype: The data-type (inferred from a column) as prescribed from ``api.dtype`` - - :returns: The appropriate function that will pre-process (clean) data of specified dtype. - """ # noqa - if data_dtype in custom_cleaning_functions: - clean_func = eval(custom_cleaning_functions[data_dtype]) - - elif data_dtype in (dtype.date, dtype.datetime): - clean_func = _standardize_datetime - - elif data_dtype in (dtype.float, dtype.tsarray): - clean_func = _clean_float - - elif data_dtype in (dtype.integer): - clean_func = _clean_int - - elif data_dtype in (dtype.array): - clean_func = _standardize_array - - elif data_dtype in (dtype.tags): - clean_func = _tags_to_tuples - - elif data_dtype in (dtype.quantity): - clean_func = _clean_quantity - - elif data_dtype in ( - dtype.short_text, - dtype.rich_text, - dtype.categorical, - dtype.binary, - dtype.audio, - dtype.image, - dtype.video - ): - clean_func = _clean_text - - else: - raise ValueError(f"{data_dtype} is not supported. Check lightwood.api.dtype") - - return clean_func - - -# ------------------------- # -# Temporal Cleaning -# ------------------------- # - - -def _standardize_datetime(element: object) -> Optional[float]: - """ - Parses an expected date-time element. Intakes an element that can in theory be anything. - """ - try: - date = parse_dt(str(element)) - except Exception: - try: - date = datetime.datetime.utcfromtimestamp(element) - except Exception: - return None - - return date.timestamp() - - -# ------------------------- # -# Tags/Sequences -# ------------------------- # - -# TODO Make it split on something other than commas -def _tags_to_tuples(tags_str: str) -> Tuple[str]: - """ - Converts comma-separated values into a tuple to preserve a sequence/array. - - Ex: - >> x = 'apples, oranges, bananas' - >> _tags_to_tuples(x) - >> ('apples', 'oranges', 'bananas') - """ - try: - return tuple([x.strip() for x in tags_str.split(",")]) - except Exception: - return tuple() - - -def _standardize_array(element: object) -> Optional[Union[List[float], float]]: - """ - Given an array of numbers in the form ``[1, 2, 3, 4]``, converts into a numerical sequence. 
- - :param element: An array-like element in a sequence - :returns: standardized array OR scalar number IF edge case - - Ex of edge case: - >> element = [1] - >> _standardize_array(element) - >> 1 - """ - try: - element = str(element) - element = element.rstrip("]").lstrip("[") - element = element.rstrip(" ").lstrip(" ") - element = element.replace(", ", " ").replace(",", " ") - # Handles cases where arrays are numbers - if " " not in element: - element = _clean_float(element) - else: - element = [float(x) for x in element.split(" ")] - except Exception: - pass - - return element - - -# ------------------------- # -# Integers/Floats/Quantities -# ------------------------- # - -def _clean_float(element: object) -> Optional[float]: - """ - Given an element, converts it into float numeric format. If element is NaN, or inf, then returns None. - """ - try: - cleaned_float = text.clean_float(element) - if is_nan_numeric(cleaned_float): - return None - return cleaned_float - except Exception: - return None - - -def _clean_int(element: object) -> Optional[int]: - """ - Given an element, converts it into integer numeric format. If element is NaN, or inf, then returns None. - """ - element = _clean_float(element) - if element is not None: - element = int(element) - return element - - -def _clean_quantity(element: object) -> Optional[float]: - """ - Given a quantity, clean and convert it into float numeric format. If element is NaN, or inf, then returns None. - """ - element = float(re.sub("[^0-9.,]", "", str(element)).replace(",", ".")) - return _clean_float(element) - - -# ------------------------- # -# Text -# ------------------------- # -def _clean_text(element: object) -> str: - return str(element) - - -# ------------------------- # -# Other helpers -# ------------------------- # -def _rm_rows_w_empty_targets(df: pd.DataFrame, target: str) -> pd.DataFrame: - """ - Drop any rows that have targets as unknown. Targets are necessary to train. - - :param df: The input dataframe including the target value - :param target: the column name that is the output target variable - - :returns: Data with any target smissing - """ - # Compare length before/after - len_before = len(df) - - # Use Pandas ```dropna``` to omit any rows with missing values for targets; these cannot be trained - df = df.dropna(subset=[target]) - - # Compare length with after - len_after = len(df) - nr_removed = len_before - len_after - - if nr_removed != 0: - log.warning( - f"Removed {nr_removed} rows because target was missing. Training on these rows is not possible." - ) # noqa - - return df - - -def _remove_columns(data: pd.DataFrame, identifiers: Dict[str, object], target: str, - mode: str, timeseries_settings: TimeseriesSettings, anomaly_detection: bool, - dtype_dict: Dict[str, dtype]) -> pd.DataFrame: - """ - Drop columns we don't want to use in order to train or predict - - :param data: The raw data - :param dtype_dict: Type information for each column - :param identifiers: A dict containing all identifier typed columns - :param target: The target columns - :param mode: Can be "predict" or "train" - :param timeseries_settings: Timeseries related settings, only relevant for timeseries predictors, otherwise can be the default object - :param anomaly_detection: Are we detecting anomalies with this predictor? 
- - :returns: A (new) dataframe without the dropped columns - """ # noqa - data = deepcopy(data) - to_drop = [*[x for x in identifiers.keys() if x != target], - *[x for x in data.columns if x in dtype_dict and dtype_dict[x] == dtype.invalid]] - exceptions = ["__mdb_make_predictions"] - to_drop = [x for x in to_drop if x in data.columns] - data = data.drop(columns=to_drop) - - if mode == "train": - data = _rm_rows_w_empty_targets(data, target) - if mode == "predict": - if ( - target in data.columns - and not timeseries_settings.use_previous_target - and not anomaly_detection - ): - data = data.drop(columns=[target]) - - # Drop extra columns - for name in list(data.columns): - if name not in dtype_dict and name not in exceptions: - data = data.drop(columns=[name]) - - return data - - -def _get_columns_to_clean(data: pd.DataFrame, dtype_dict: Dict[str, dtype], mode: str, target: str) -> List[str]: - """ - :param data: The raw data - :param dtype_dict: Type information for each column - :param target: The target columns - :param mode: Can be "predict" or "train" - - :returns: A list of columns that we want to clean - """ # noqa - - cleanable_columns = [] - for name, _ in dtype_dict.items(): - if mode == "predict": - if name == target: - continue - if name in data.columns: - cleanable_columns.append(name) - return cleanable_columns -
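Taking the signature above at face value, a minimal call to the cleaner on a toy frame might look as follows. The column names, cell values, the import path and the `TimeseriesSettings(is_timeseries=False)` default are assumptions made for illustration only:

```python
import pandas as pd
from lightwood.api.dtype import dtype
from lightwood.api.types import TimeseriesSettings
from lightwood.data.cleaner import cleaner

# Toy frame: 'NaN' and malformed cells should come back as None after cleaning.
df = pd.DataFrame({'age': ['25', 'NaN', '40'], 'label': ['yes', 'no', 'yes']})

clean_df = cleaner(
    data=df,
    dtype_dict={'age': dtype.integer, 'label': dtype.binary},
    pct_invalid=100,                                   # tolerate any share of invalid cells
    identifiers={},                                    # no identifier-typed columns here
    target='label',
    mode='train',
    timeseries_settings=TimeseriesSettings(is_timeseries=False),
    anomaly_detection=False,
)
print(clean_df)
```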
\ No newline at end of file
diff --git a/docs/_modules/lightwood/data/encoded_ds.html b/docs/_modules/lightwood/data/encoded_ds.html
deleted file mode 100644
index 1828dc278..000000000
--- a/docs/_modules/lightwood/data/encoded_ds.html
+++ /dev/null
@@ -1,409 +0,0 @@
- lightwood.data.encoded_ds — lightwood 1.6.1 documentation
Source code for lightwood.data.encoded_ds

-import inspect
-from typing import List, Tuple
-import torch
-import numpy as np
-import pandas as pd
-from torch.utils.data import Dataset
-from lightwood.encoder.base import BaseEncoder
-
-
-
[docs]class EncodedDs(Dataset): - def __init__(self, encoders: List[BaseEncoder], data_frame: pd.DataFrame, target: str) -> None: - """ - Create a Lightwood datasource from a data frame and some encoders. This class inherits from `torch.utils.data.Dataset`. - - Note: normal behavior is to cache encoded representations to avoid duplicated computations. If you want an option to disable, this please open an issue. - - :param encoders: list of Lightwood encoders used to encode the data per each column. - :param data_frame: original dataframe. - :param target: name of the target column to predict. - """ # noqa - self.data_frame = data_frame - self.encoders = encoders - self.target = target - self.cache_encoded = True - self.cache = [None] * len(self.data_frame) - self.encoder_spans = {} - self.input_length = 0 - - # save encoder span, has to use same iterator as in __getitem__ for correct indeces - for col in self.data_frame: - if col != self.target and self.encoders.get(col, False): - self.encoder_spans[col] = (self.input_length, - self.input_length + self.encoders[col].output_size) - self.input_length += self.encoders[col].output_size - - def __len__(self): - """ - The length of an `EncodedDs` datasource equals the amount of rows of the original dataframe. - - :return: length of the `EncodedDs` - """ - return int(self.data_frame.shape[0]) - - def __getitem__(self, idx: int) -> Tuple[torch.Tensor, torch.Tensor]: - """ - The getter yields a tuple (X, y), where: - - `X `is a concatenation of all encoded representations of the row - - `y` is the encoded target - - :param idx: index of the row to access. - - :return: tuple (X, y) with encoded data. - - """ # noqa - if self.cache_encoded: - if self.cache[idx] is not None: - return self.cache[idx] - - X = torch.FloatTensor() - Y = torch.FloatTensor() - for col in self.data_frame: - if self.encoders.get(col, None): - kwargs = {} - if 'dependency_data' in inspect.signature(self.encoders[col].encode).parameters: - kwargs['dependency_data'] = {dep: [self.data_frame.iloc[idx][dep]] - for dep in self.encoders[col].dependencies} - if hasattr(self.encoders[col], 'data_window'): - cols = [self.target] + [f'{self.target}_timestep_{i}' - for i in range(1, self.encoders[col].data_window)] - else: - cols = [col] - - data = self.data_frame[cols].iloc[idx].tolist() - encoded_tensor = self.encoders[col].encode(data, **kwargs)[0] - if col != self.target: - X = torch.cat([X, encoded_tensor]) - else: - Y = encoded_tensor - - if self.cache_encoded: - self.cache[idx] = (X, Y) - - return X, Y - -
[docs] def get_column_original_data(self, column_name: str) -> pd.Series: - """ - Gets the original data for any given column of the `EncodedDs`. - - :param column_name: name of the column. - :return: A `pd.Series` with the original data stored in the `column_name` column. - """ - return self.data_frame[column_name]
- -
[docs] def get_encoded_column_data(self, column_name: str) -> torch.Tensor: - """ - Gets the encoded data for any given column of the `EncodedDs`. - - :param column_name: name of the column. - :return: A `torch.Tensor` with the encoded data of the `column_name` column. - """ - kwargs = {} - if 'dependency_data' in inspect.signature(self.encoders[column_name].encode).parameters: - deps = [dep for dep in self.encoders[column_name].dependencies if dep in self.data_frame.columns] - kwargs['dependency_data'] = {dep: self.data_frame[dep].tolist() for dep in deps} - encoded_data = self.encoders[column_name].encode(self.data_frame[column_name], **kwargs) - - if not isinstance(encoded_data, torch.Tensor): - raise Exception( - f'The encoder: {self.encoders[column_name]} for column: {column_name} does not return a Tensor !') - return encoded_data
- -
[docs] def get_encoded_data(self, include_target=True) -> torch.Tensor: - """ - Gets all encoded data. - - :param include_target: whether to include the target column in the output or not. - :return: A `torch.Tensor` with the encoded dataframe. - """ - encoded_dfs = [] - for col in self.data_frame.columns: - if (include_target or col != self.target) and self.encoders.get(col, False): - encoded_dfs.append(self.get_encoded_column_data(col)) - - return torch.cat(encoded_dfs, 1)
- -
[docs] def clear_cache(self): - """ - Clears the `EncodedDs` cache. - """ - self.cache = [None] * len(self.data_frame)
- - -
[docs]class ConcatedEncodedDs(EncodedDs): - """ - `ConcatedEncodedDs` abstracts over multiple encoded datasources (`EncodedDs`) as if they were a single entity. - """ # noqa - def __init__(self, encoded_ds_arr: List[EncodedDs]) -> None: - # @TODO: missing super() call here? - self.encoded_ds_arr = encoded_ds_arr - self.encoded_ds_lenghts = [len(x) for x in self.encoded_ds_arr] - self.encoders = self.encoded_ds_arr[0].encoders - self.encoder_spans = self.encoded_ds_arr[0].encoder_spans - self.target = self.encoded_ds_arr[0].target - - def __len__(self): - """ - See `lightwood.data.encoded_ds.EncodedDs.__len__()`. - """ - # @TODO: behavior here is not intuitive - return max(0, np.sum(self.encoded_ds_lenghts) - 2) - - def __getitem__(self, idx: int) -> Tuple[torch.Tensor, torch.Tensor]: - """ - See `lightwood.data.encoded_ds.EncodedDs.__getitem__()`. - """ - for ds_idx, length in enumerate(self.encoded_ds_lenghts): - if idx - length < 0: - return self.encoded_ds_arr[ds_idx][idx] - else: - idx -= length - raise StopIteration() - - @property - def data_frame(self) -> pd.DataFrame: - """ - Property that concatenates all underlying `EncodedDs`'s dataframes and returns them. - - Note: be careful to not modify a `ConcatedEncodedDs`, as you can see in the source, it will not have an effect. - - :return: Dataframe with all original data. - """ # noqa - return pd.concat([x.data_frame for x in self.encoded_ds_arr]) - -
[docs] def get_column_original_data(self, column_name: str) -> pd.Series: - """ - See `lightwood.data.encoded_ds.EncodedDs.get_column_original_data()`. - """ - encoded_df_arr = [x.get_column_original_data(column_name) for x in self.encoded_ds_arr] - return pd.concat(encoded_df_arr)
- -
[docs] def get_encoded_column_data(self, column_name: str) -> torch.Tensor: - """ - See `lightwood.data.encoded_ds.EncodedDs.get_encoded_column_data()`. - """ - encoded_df_arr = [x.get_encoded_column_data(column_name) for x in self.encoded_ds_arr] - return torch.cat(encoded_df_arr, 0)
- -
[docs] def clear_cache(self): - """ - See `lightwood.data.encoded_ds.EncodedDs.clear_cache()`. - """ - for ds in self.encoded_ds_arr: - ds.clear_cache()
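Because `EncodedDs` subclasses `torch.utils.data.Dataset`, a typical use is to wrap it in a `DataLoader` once the encoders have been prepared elsewhere in the pipeline. A minimal sketch, assuming `encoders` is a dict of already-fitted Lightwood encoders keyed by column name, `df` the matching dataframe, and `'label'` a hypothetical target column:

```python
from torch.utils.data import DataLoader
from lightwood.data.encoded_ds import EncodedDs

# `encoders` and `df` are assumed to come from an already-fitted Lightwood pipeline.
ds = EncodedDs(encoders, df, target='label')

loader = DataLoader(ds, batch_size=32, shuffle=True)
for X, y in loader:
    # X: concatenation of all encoded input columns, y: encoded target
    print(X.shape, y.shape)
    break
```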
\ No newline at end of file
diff --git a/docs/_modules/lightwood/data/splitter.html b/docs/_modules/lightwood/data/splitter.html
deleted file mode 100644
index b0976c547..000000000
--- a/docs/_modules/lightwood/data/splitter.html
+++ /dev/null
@@ -1,361 +0,0 @@
- lightwood.data.splitter — lightwood 1.6.1 documentation
Source code for lightwood.data.splitter

-from typing import List, Dict
-from itertools import product
-
-import numpy as np
-import pandas as pd
-
-from lightwood.helpers.log import log
-from lightwood.api.dtype import dtype
-from lightwood.api.types import TimeseriesSettings
-
-
-
[docs]def splitter( - data: pd.DataFrame, - tss: TimeseriesSettings, - dtype_dict: Dict[str, str], - seed: int, - pct_train: float, - pct_dev: float, - pct_test: float, - target: str -) -> Dict[str, pd.DataFrame]: - """ - Splits data into training, dev and testing datasets. - - The proportion of data for each split must be specified (JSON-AI sets defaults to 80/10/10). First, rows in the dataset are shuffled randomly. Then a simple split is done. If a target value is provided and is of data type categorical/binary, then the splits will be stratified to maintain the representative populations of each class. - - :param data: Input dataset to be split - :param tss: time-series specific details for splitting - :param dtype_dict: Dictionary with the data type of all columns - :param seed: Random state for pandas data-frame shuffling - :param pct_train: training fraction of data; must be less than 1 - :param pct_dev: dev fraction of data; must be less than 1 - :param pct_test: testing fraction of data; must be less than 1 - :param target: Name of the target column; if specified, data will be stratified on this column - - :returns: A dictionary containing the keys train, test and dev with their respective data frames, as well as the "stratified_on" key indicating which columns the data was stratified on (None if it wasn't stratified on anything) - """ # noqa - pct_sum = pct_train + pct_dev + pct_test - if not (np.isclose(pct_sum, 1, atol=0.001) and np.less(pct_sum, 1 + 1e-5)): - raise Exception(f'The train, dev and test percentage of the data needs to sum up to 1 (got {pct_sum})') - - # Shuffle the data - np.random.seed(seed) - if not tss.is_timeseries: - data = data.sample(frac=1, random_state=seed).reset_index(drop=True) - - # Check if stratification should be done - stratify_on = [] - if target is not None: - if dtype_dict[target] in (dtype.categorical, dtype.binary) and not tss.is_timeseries: - stratify_on = [target] - if tss.is_timeseries and isinstance(tss.group_by, list): - stratify_on = tss.group_by - - # Split the data - if stratify_on: - reshuffle = not tss.is_timeseries - train, dev, test = stratify(data, pct_train, pct_dev, pct_test, stratify_on, seed, reshuffle) - else: - train, dev, test = simple_split(data, pct_train, pct_dev, pct_test) - - return {"train": train, "test": test, "dev": dev, "stratified_on": stratify_on}
- - -def simple_split(data: pd.DataFrame, - pct_train: float, - pct_dev: float, - pct_test: float) -> List[pd.DataFrame]: - """ - Simple split method to separate data into training, dev and testing datasets. - - :param data: Input dataset to be split - :param pct_train: training fraction of data; must be less than 1 - :param pct_dev: dev fraction of data; must be less than 1 - :param pct_test: testing fraction of data; must be less than 1 - - :returns Train, dev, and test dataframes - """ - train_cutoff = round(data.shape[0] * pct_train) - dev_cutoff = round(data.shape[0] * pct_dev) + train_cutoff - test_cutoff = round(data.shape[0] * pct_test) + dev_cutoff - - train = data[:train_cutoff] - dev = data[train_cutoff:dev_cutoff] - test = data[dev_cutoff:test_cutoff] - - return [train, dev, test] - - -def stratify(data: pd.DataFrame, - pct_train: float, - pct_dev: float, - pct_test: float, - stratify_on: List[str], - seed: int, - reshuffle: bool) -> List[pd.DataFrame]: - """ - Stratified data splitter. - - The `stratify_on` columns yield a cartesian product by which every different subset will be stratified - independently from the others, and recombined at the end in fractions specified by `pcts`. - - For grouped time series tasks, stratification is done based on the group-by columns. - - :param data: dataframe with data to be split - :param pct_train: fraction of data to use for training split - :param pct_dev: fraction of data to use for dev split (used internally by mixers) - :param pct_test: fraction of data to use for test split (used post-training for analysis) - :param stratify_on: Columns to consider when stratifying - :param seed: Random state for pandas data-frame shuffling - :param reshuffle: specify if reshuffling should be done post-split - - :returns Stratified train, dev, test dataframes - """ # noqa - - train_st = pd.DataFrame(columns=data.columns) - dev_st = pd.DataFrame(columns=data.columns) - test_st = pd.DataFrame(columns=data.columns) - - all_group_combinations = list(product(*[data[col].unique() for col in stratify_on])) - for group in all_group_combinations: - df = data - for idx, col in enumerate(stratify_on): - df = df[df[col] == group[idx]] - - train_cutoff = round(df.shape[0] * pct_train) - dev_cutoff = round(df.shape[0] * pct_dev) + train_cutoff - test_cutoff = round(df.shape[0] * pct_test) + dev_cutoff - - train_st = train_st.append(df[:train_cutoff]) - dev_st = dev_st.append(df[train_cutoff:dev_cutoff]) - test_st = test_st.append(df[dev_cutoff:test_cutoff]) - - if reshuffle: - train_st, dev_st, test_st = [df.sample(frac=1, random_state=seed).reset_index(drop=True) - for df in [train_st, dev_st, test_st]] - - # check that stratified lengths conform to expected percentages - if not np.isclose(len(train_st) / len(data), pct_train, atol=0.01) or \ - not np.isclose(len(dev_st) / len(data), pct_dev, atol=0.01) or \ - not np.isclose(len(test_st) / len(data), pct_test, atol=0.01): - log.info("Could not stratify; reverting to simple split") - train_st, dev_st, test_st = simple_split(data, pct_train, pct_dev, pct_test) - - return [train_st, dev_st, test_st] -
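A direct call against the signature above, stratifying an 80/10/10 split on a binary target; the toy dataframe, column names and import path are hypothetical stand-ins:

```python
import pandas as pd
from lightwood.api.dtype import dtype
from lightwood.api.types import TimeseriesSettings
from lightwood.data.splitter import splitter

df = pd.DataFrame({'x': range(100), 'label': ['yes', 'no'] * 50})

splits = splitter(
    data=df,
    tss=TimeseriesSettings(is_timeseries=False),
    dtype_dict={'x': dtype.integer, 'label': dtype.binary},
    seed=1,
    pct_train=0.8,
    pct_dev=0.1,
    pct_test=0.1,
    target='label',
)
# Expect roughly 80/10/10 rows, stratified on the binary target column.
print(len(splits['train']), len(splits['dev']), len(splits['test']), splits['stratified_on'])
```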
\ No newline at end of file
diff --git a/docs/_modules/lightwood/data/timeseries_analyzer.html b/docs/_modules/lightwood/data/timeseries_analyzer.html
deleted file mode 100644
index b07328010..000000000
--- a/docs/_modules/lightwood/data/timeseries_analyzer.html
+++ /dev/null
@@ -1,349 +0,0 @@
- lightwood.data.timeseries_analyzer — lightwood 1.6.1 documentation
Source code for lightwood.data.timeseries_analyzer

-from typing import Dict, Tuple, List
-
-import numpy as np
-import pandas as pd
-
-from lightwood.api.types import TimeseriesSettings
-from lightwood.api.dtype import dtype
-from lightwood.encoder.time_series.helpers.common import generate_target_group_normalizers
-from lightwood.helpers.general import get_group_matches
-
-
-
[docs]def timeseries_analyzer(data: pd.DataFrame, dtype_dict: Dict[str, str], - timeseries_settings: TimeseriesSettings, target: str) -> Dict: - """ - This module analyzes (pre-processed) time series data and stores a few useful insights used in the rest of Lightwood's pipeline. - - :param data: dataframe with time series dataset. - :param dtype_dict: dictionary with inferred types for every column. - :param timeseries_settings: A `TimeseriesSettings` object. For more details, check `lightwood.types.TimeseriesSettings`. - :param target: name of the target column. - - The following things are extracted from each time series inside the dataset: - - group_combinations: all observed combinations of values for the set of `group_by` columns. The length of this list determines how many time series are in the data. - - deltas: inferred sampling interval - - ts_naive_residuals: Residuals obtained from the data by a naive forecaster that repeats the last-seen value. - - ts_naive_mae: Mean residual value obtained from the data by a naive forecaster that repeats the last-seen value. - - target_normalizers: objects that may normalize the data within any given time series for effective learning. See `lightwood.encoder.time_series.helpers.common` for available choices. - - :return: Dictionary with the aforementioned insights and the `TimeseriesSettings` object for future references. - """ # noqa - info = { - 'original_type': dtype_dict[target], - 'data': data[target].values - } - if timeseries_settings.group_by is not None: - info['group_info'] = {gcol: data[gcol].tolist() for gcol in timeseries_settings.group_by} # group col values - else: - info['group_info'] = {} - - # @TODO: maybe normalizers should fit using only the training subsets?? - new_data = generate_target_group_normalizers(info) - - if dtype_dict[target] in (dtype.integer, dtype.float, dtype.tsarray): - naive_forecast_residuals, scale_factor = get_grouped_naive_residuals(info, new_data['group_combinations']) - else: - naive_forecast_residuals, scale_factor = {}, {} - - deltas = get_delta(data[timeseries_settings.order_by], - info, - new_data['group_combinations'], - timeseries_settings.order_by) - - return {'target_normalizers': new_data['target_normalizers'], - 'deltas': deltas, - 'tss': timeseries_settings, - 'group_combinations': new_data['group_combinations'], - 'ts_naive_residuals': naive_forecast_residuals, - 'ts_naive_mae': scale_factor - }
- - -def get_delta(df: pd.DataFrame, ts_info: dict, group_combinations: list, order_cols: list) -> Dict[str, Dict]: - """ - Infer the sampling interval of each time series, by picking the most popular time interval observed in the training data. - - :param df: Dataframe with time series data. - :param ts_info: Dictionary used internally by `timeseries_analyzer`. Contains group-wise series information, among other things. - :param group_combinations: all tuples with distinct values for `TimeseriesSettings.group_by` columns, defining all available time series. - :param order_cols: all columns specified in `TimeseriesSettings.order_by`. - - :return: - Dictionary with group combination tuples as keys. Values are dictionaries with the inferred delta for each series, for each `order_col`. - """ # noqa - deltas = {"__default": {}} - - # get default delta for all data - for col in order_cols: - series = pd.Series([x[-1] for x in df[col]]) - rolling_diff = series.rolling(window=2).apply(lambda x: x.iloc[1] - x.iloc[0]) - delta = rolling_diff.value_counts(ascending=False).keys()[0] # pick most popular - deltas["__default"][col] = delta - - # get group-wise deltas (if applicable) - if ts_info.get('group_info', False): - original_data = ts_info['data'] - for group in group_combinations: - if group != "__default": - deltas[group] = {} - for col in order_cols: - ts_info['data'] = pd.Series([x[-1] for x in df[col]]) - _, subset = get_group_matches(ts_info, group) - if subset.size > 1: - rolling_diff = pd.Series( - subset.squeeze()).rolling( - window=2).apply( - lambda x: x.iloc[1] - x.iloc[0]) - delta = rolling_diff.value_counts(ascending=False).keys()[0] - deltas[group][col] = delta - ts_info['data'] = original_data - - return deltas - - -def get_naive_residuals(target_data: pd.DataFrame, m: int = 1) -> Tuple[List, float]: - """ - Computes forecasting residuals for the naive method (forecasts for time `t` is the value observed at `t-1`). - Useful for computing MASE forecasting error. - - Note: method assumes predictions are all for the same group combination. For a dataframe that contains multiple - series, use `get_grouped_naive_resiudals`. - - :param target_data: observed time series targets - :param m: season length. the naive forecasts will be the m-th previously seen value for each series - - :return: (list of naive residuals, average residual value) - """ # noqa - residuals = target_data.rolling(window=m + 1).apply(lambda x: abs(x.iloc[m] - x.iloc[0]))[m:].values.flatten() - scale_factor = np.average(residuals) - return residuals.tolist(), scale_factor - - -def get_grouped_naive_residuals(info: Dict, group_combinations: List) -> Tuple[Dict, Dict]: - """ - Wraps `get_naive_residuals` for a dataframe with multiple co-existing time series. - """ # noqa - group_residuals = {} - group_scale_factors = {} - for group in group_combinations: - idxs, subset = get_group_matches(info, group) - residuals, scale_factor = get_naive_residuals(pd.DataFrame(subset)) # @TODO: pass m once we handle seasonality - group_residuals[group] = residuals - group_scale_factors[group] = scale_factor - return group_residuals, group_scale_factors -
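The naive residuals above (used as the MASE scale factor) are easy to verify by hand on a made-up series: with `m = 1` the naive forecast for time `t` is the value observed at `t - 1`, so the residuals are simply the absolute first differences. A small sketch with a toy single-group series:

```python
import numpy as np
import pandas as pd

series = pd.DataFrame([10, 12, 11, 15, 14])   # toy target series for one group

m = 1
residuals = series.rolling(window=m + 1).apply(lambda x: abs(x.iloc[m] - x.iloc[0]))[m:].values.flatten()
scale_factor = np.average(residuals)

print(residuals.tolist(), scale_factor)        # [2.0, 1.0, 4.0, 1.0] -> 2.0
```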
\ No newline at end of file
diff --git a/docs/_modules/lightwood/data/timeseries_transform.html b/docs/_modules/lightwood/data/timeseries_transform.html
deleted file mode 100644
index 2ed3dc2db..000000000
--- a/docs/_modules/lightwood/data/timeseries_transform.html
+++ /dev/null
@@ -1,562 +0,0 @@
- lightwood.data.timeseries_transform — lightwood 1.6.1 documentation
Source code for lightwood.data.timeseries_transform

-import copy
-import datetime
-import dateutil
-import numpy as np
-import pandas as pd
-import multiprocessing as mp
-from lightwood.helpers.parallelism import get_nr_procs
-from functools import partial
-from typing import Dict
-from lightwood.api.types import TimeseriesSettings
-from lightwood.helpers.log import log
-from lightwood.api import dtype
-
-
-
[docs]def transform_timeseries( - data: pd.DataFrame, dtype_dict: Dict[str, str], - timeseries_settings: TimeseriesSettings, target: str, mode: str) -> pd.DataFrame: - """ - Block that transforms the dataframe of a time series task to a convenient format for use in posterior phases like model training. - - The main transformations performed by this block are: - - Type casting (e.g. to numerical for `order_by` columns). - - Windowing functions for historical context based on `TimeseriesSettings.window` parameter. - - Explicitly add target columns according to the `TimeseriesSettings.nr_predictions` parameter. - - Flag all rows that are "predictable" based on all `TimeseriesSettings`. - - Plus, handle all logic for the streaming use case (where forecasts are only emitted for the last observed data point). - - :param data: Dataframe with data to transform. - :param dtype_dict: Dictionary with the types of each column. - :param timeseries_settings: A `TimeseriesSettings` object. - :param target: The name of the target column to forecast. - :param mode: Either "train" or "predict", depending on what phase is calling this procedure. - - :return: A dataframe with all the transformations applied. - """ # noqa - - tss = timeseries_settings - original_df = copy.deepcopy(data) - gb_arr = tss.group_by if tss.group_by is not None else [] - ob_arr = tss.order_by - window = tss.window - - if '__mdb_make_predictions' in original_df.columns: - index = original_df[original_df['__mdb_make_predictions'].map( - {'True': True, 'False': False, True: True, False: False}).isin([True])] - infer_mode = index.shape[0] == 0 # condition to trigger: __mdb_make_predictions is set to False everywhere - # @TODO: dont drop and use instead of original_index? - original_df = original_df.reset_index(drop=True) if infer_mode else original_df - else: - infer_mode = False - - original_index_list = [] - idx = 0 - for row in original_df.itertuples(): - if _make_pred(row) or infer_mode: - original_index_list.append(idx) - idx += 1 - else: - original_index_list.append(None) - - original_df['original_index'] = original_index_list - - secondary_type_dict = {} - for col in ob_arr: - if dtype_dict[col] in (dtype.date, dtype.integer, dtype.float): - secondary_type_dict[col] = dtype_dict[col] - - # Convert order_by columns to numbers (note, rows are references to mutable rows in `original_df`) - for _, row in original_df.iterrows(): - for col in ob_arr: - # @TODO: Remove if the TS encoder can handle `None` - if row[col] is None or pd.isna(row[col]): - row[col] = 0.0 - else: - if dtype_dict[col] == dtype.date: - try: - row[col] = dateutil.parser.parse( - row[col], - **{} - ) - except (TypeError, ValueError): - pass - - if isinstance(row[col], datetime.datetime): - row[col] = row[col].timestamp() - - try: - row[col] = float(row[col]) - except ValueError: - raise ValueError(f'Failed to order based on column: "{col}" due to faulty value: {row[col]}') - - for oby in tss.order_by: - original_df[f'__mdb_original_{oby}'] = original_df[oby] - - group_lengths = [] - if len(gb_arr) > 0: - df_arr = [] - for _, df in original_df.groupby(gb_arr): - df_arr.append(df.sort_values(by=ob_arr)) - group_lengths.append(len(df)) - else: - df_arr = [original_df] - group_lengths.append(len(original_df)) - - n_groups = len(df_arr) - last_index = original_df['original_index'].max() - for i, subdf in enumerate(df_arr): - if '__mdb_make_predictions' in subdf.columns and mode == 'predict': - if infer_mode: - df_arr[i] = _ts_infer_next_row(subdf, ob_arr, last_index) - 
last_index += 1 - - if len(original_df) > 500: - # @TODO: restore possibility to override this with args - nr_procs = get_nr_procs(original_df) - log.info(f'Using {nr_procs} processes to reshape.') - pool = mp.Pool(processes=nr_procs) - # Make type `object` so that dataframe cells can be python lists - df_arr = pool.map(partial(_ts_to_obj, historical_columns=ob_arr + tss.historical_columns), df_arr) - df_arr = pool.map(partial(_ts_order_col_to_cell_lists, - order_cols=ob_arr + tss.historical_columns), df_arr) - df_arr = pool.map( - partial( - _ts_add_previous_rows, order_cols=ob_arr + tss.historical_columns, window=window), - df_arr) - - df_arr = pool.map(partial(_ts_add_future_target, target=target, nr_predictions=tss.nr_predictions, - data_dtype=tss.target_type, mode=mode), - df_arr) - - if tss.use_previous_target: - df_arr = pool.map( - partial(_ts_add_previous_target, target=target, window=tss.window), - df_arr) - pool.close() - pool.join() - else: - for i in range(n_groups): - df_arr[i] = _ts_to_obj(df_arr[i], historical_columns=ob_arr + tss.historical_columns) - df_arr[i] = _ts_order_col_to_cell_lists(df_arr[i], order_cols=ob_arr + tss.historical_columns) - df_arr[i] = _ts_add_previous_rows(df_arr[i], - order_cols=ob_arr + tss.historical_columns, window=window) - df_arr[i] = _ts_add_future_target(df_arr[i], target=target, nr_predictions=tss.nr_predictions, - data_dtype=tss.target_type, mode=mode) - if tss.use_previous_target: - df_arr[i] = _ts_add_previous_target(df_arr[i], target=target, window=tss.window) - - combined_df = pd.concat(df_arr) - - if '__mdb_make_predictions' in combined_df.columns: - combined_df = pd.DataFrame(combined_df[combined_df['__mdb_make_predictions'].astype(bool).isin([True])]) - del combined_df['__mdb_make_predictions'] - - if not infer_mode and any([i < tss.window for i in group_lengths]): - if tss.allow_incomplete_history: - log.warning("Forecasting with incomplete historical context, predictions might be subpar") - else: - raise Exception(f'Not enough historical context to make a timeseries prediction. Please provide a number of rows greater or equal to the window size. If you can\'t get enough rows, consider lowering your window size. If you want to force timeseries predictions lacking historical context please set the `allow_incomplete_history` timeseries setting to `True`, but this might lead to subpar predictions.') # noqa - - df_gb_map = None - if n_groups > 1: - df_gb_list = list(combined_df.groupby(tss.group_by)) - df_gb_map = {} - for gb, df in df_gb_list: - df_gb_map['_' + '_'.join(gb)] = df - - timeseries_row_mapping = {} - idx = 0 - - if df_gb_map is None: - for _, row in combined_df.iterrows(): - if not infer_mode: - timeseries_row_mapping[idx] = int( - row['original_index']) if row['original_index'] is not None and not np.isnan( - row['original_index']) else None - else: - timeseries_row_mapping[idx] = idx - idx += 1 - else: - for gb in df_gb_map: - for _, row in df_gb_map[gb].iterrows(): - if not infer_mode: - timeseries_row_mapping[idx] = int( - row['original_index']) if row['original_index'] is not None and not np.isnan( - row['original_index']) else None - else: - timeseries_row_mapping[idx] = idx - - idx += 1 - - del combined_df['original_index'] - - # return combined_df, secondary_type_dict, timeseries_row_mapping, df_gb_map - return combined_df
- - -def _ts_infer_next_row(df: pd.DataFrame, ob: str, last_index: int) -> pd.DataFrame: - """ - Adds an inferred next row for streaming mode purposes. - - :param df: dataframe from which next row is inferred. - :param ob: `order_by` column. - :param last_index: index number of the latest row in `df`. - - :return: Modified `df` with the inferred row appended to it. - """ - last_row = df.iloc[[-1]].copy() - if df.shape[0] > 1: - butlast_row = df.iloc[[-2]] - delta = (last_row[ob].values - butlast_row[ob].values).flatten()[0] - else: - delta = 1 - last_row.original_index = None - last_row.index = [last_index + 1] - last_row['__mdb_make_predictions'] = True - last_row['__mdb_ts_inferred'] = True - last_row[ob] += delta - return df.append(last_row) - - -def _make_pred(row) -> bool: - """ - Indicates whether a prediction should be made for `row` or not. - """ - return not hasattr(row, '__mdb_make_predictions') or row.make_predictions - - -def _ts_to_obj(df: pd.DataFrame, historical_columns: list) -> pd.DataFrame: - """ - Casts all historical columns in a dataframe to `object` type. - - :param df: Input dataframe - :param historical_columns: Historical columns to type cast - - :return: Dataframe with `object`-typed historical columns - """ - for hist_col in historical_columns: - df.loc[:, hist_col] = df[hist_col].astype(object) - return df - - -def _ts_order_col_to_cell_lists(df: pd.DataFrame, order_cols: list) -> pd.DataFrame: - """ - Casts all data in `order_by` columns into cells. - - :param df: Input dataframe - :param order_cols: `order_by` columns - - :return: Dataframe with all `order_cols` modified so that their values are cells, e.g. `1` -> `[1]` - """ - for order_col in order_cols: - for ii in range(len(df)): - label = df.index.values[ii] - df.at[label, order_col] = [df.at[label, order_col]] - return df - - -def _ts_add_previous_rows(df: pd.DataFrame, order_cols: list, window: int) -> pd.DataFrame: - """ - Adds previous rows (as determined by `TimeseriesSettings.window`) into the cells of all `order_by` columns. - - :param df: Input dataframe. - :param order_cols: `order_by` columns. - :param window: value of `TimeseriesSettings.window` parameter. - - :return: Dataframe with all `order_cols` modified so that their values are now arrays of historical context. - """ # noqa - for order_col in order_cols: - for i in range(len(df)): - previous_indexes = [*range(max(0, i - window), i)] - - for prev_i in reversed(previous_indexes): - df.iloc[i][order_col].append( - df.iloc[prev_i][order_col][-1] - ) - - # Zero pad - # @TODO: Remove since RNN encoder can do without (???) - df.iloc[i][order_col].extend( - [0] * (1 + window - len(df.iloc[i][order_col])) - ) - df.iloc[i][order_col].reverse() - return df - - -def _ts_add_previous_target(df: pd.DataFrame, target: str, window: int) -> pd.DataFrame: - """ - Adds previous rows (as determined by `TimeseriesSettings.window`) into the cells of the target column. - - :param df: Input dataframe. - :param target: target column name. - :param window: value of `TimeseriesSettings.window` parameter. - - :return: Dataframe with new `__mdb_ts_previous_{target}` column that contains historical target context. 
- """ # noqa - if target not in df: - return df - previous_target_values = list(df[target]) - del previous_target_values[-1] - previous_target_values = [None] + previous_target_values - - previous_target_values_arr = [] - for i in range(len(previous_target_values)): - prev_vals = previous_target_values[max(i - window, 0):i + 1] - arr = [None] * (window - len(prev_vals) + 1) - arr.extend(prev_vals) - previous_target_values_arr.append(arr) - - df[f'__mdb_ts_previous_{target}'] = previous_target_values_arr - return df - - -def _ts_add_future_target(df, target, nr_predictions, data_dtype, mode): - """ - Adds as many columns to the input dataframe as the forecasting horizon asks for (as determined by `TimeseriesSettings.nr_predictions`). - - :param df: Input dataframe. - :param target: target column name. - :param nr_predictions: value of `TimeseriesSettings.nr_predictions` parameter. - :param data_dtype: dictionary with types of all input columns - :param mode: either "train" or "predict". `Train` will drop rows with incomplet target info. `Predict` has no effect, for now. - - :return: Dataframe with new `{target}_timestep_{i}'` columns that contains target labels at timestep `i` of a total `TimeseriesSettings.nr_predictions`. - """ # noqa - if target not in df: - return df - if data_dtype in (dtype.integer, dtype.float, dtype.array, dtype.tsarray): - df[target] = df[target].astype(float) - - for timestep_index in range(1, nr_predictions): - next_target_value_arr = list(df[target]) - for del_index in range(0, min(timestep_index, len(next_target_value_arr))): - del next_target_value_arr[0] - next_target_value_arr.append(None) - col_name = f'{target}_timestep_{timestep_index}' - df[col_name] = next_target_value_arr - df[col_name] = df[col_name].fillna(value=np.nan) - - # drop rows with incomplete target info. - if mode == 'train': - for col in [f'{target}_timestep_{i}' for i in range(1, nr_predictions)]: - if '__mdb_make_predictions' not in df.columns: - df['__mdb_make_predictions'] = True - df.loc[df[col].isna(), ['__mdb_make_predictions']] = False - - return df -
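The window and horizon reshaping described in the docstrings above can be illustrated with a small, self-contained pandas sketch. The toy column names (t, sales) and values are made up and are not part of Lightwood's API; the real pipeline additionally handles grouping, type casting and padding to window + 1 elements.

    import pandas as pd

    window, horizon = 3, 2
    df = pd.DataFrame({'t': [1, 2, 3, 4, 5], 'sales': [10, 20, 30, 40, 50]})

    # Windowing: each order-by cell becomes the last `window` observed values, zero-padded at the start.
    df['t_window'] = [
        [0] * max(0, window - i - 1) + list(df['t'].iloc[max(0, i - window + 1):i + 1])
        for i in range(len(df))
    ]

    # Horizon: explicit future-target columns, one per forecast step beyond the first.
    for step in range(1, horizon):
        df[f'sales_timestep_{step}'] = df['sales'].shift(-step)

    print(df)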
- - - - - - - - - - - \ No newline at end of file diff --git a/docs/_modules/lightwood/encoder/array/array.html b/docs/_modules/lightwood/encoder/array/array.html deleted file mode 100644 index 46ed1afa5..000000000 --- a/docs/_modules/lightwood/encoder/array/array.html +++ /dev/null @@ -1,305 +0,0 @@ - - - - - - - - - - lightwood.encoder.array.array — lightwood 1.6.1 documentation - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - -
Source code for lightwood.encoder.array.array

-from typing import List, Union
-import torch
-import pandas as pd
-import numpy as np
-from lightwood.encoder.base import BaseEncoder
-from lightwood.api import dtype
-from lightwood.encoder.helpers import MinMaxNormalizer, CatNormalizer
-from lightwood.helpers.general import is_none
-
-
-
[docs]class ArrayEncoder(BaseEncoder): - """ - Fits a normalizer for array data. To encode, `ArrayEncoder` returns a normalized window of previous data. - It can be used for generic arrays, as well as for handling historical target values in time series tasks. - - Currently supported normalizing strategies are minmax for numerical arrays, and a simple one-hot for categorical arrays. See `lightwood.encoder.helpers` for more details on each approach. - - :param stop_after: time budget in seconds. - :param window: expected length of array data. - :param original_dtype: element-wise data type - """ # noqa - - is_trainable_encoder: bool = True - - def __init__(self, stop_after: int, window: int = None, is_target: bool = False, original_type: dtype = None): - super().__init__(is_target) - self.stop_after = stop_after - self.original_type = original_type - self._normalizer = None - if window is not None: - self.output_size = window + 1 - else: - self.output_size = None - - def _pad_and_strip(self, array: List[object]): - if len(array) < self.output_size: - array = array + [0] * (self.output_size - len(array)) - if len(array) > self.output_size: - array = array[:self.output_size] - return array - - def prepare(self, train_priming_data, dev_priming_data): - priming_data = pd.concat([train_priming_data, dev_priming_data]) - priming_data = priming_data.values - - if self.output_size is None: - self.output_size = np.max([len(x) for x in priming_data if x is not None]) - for i in range(len(priming_data)): - if is_none(priming_data[i]): - priming_data[i] = [0] * self.output_size - - if self.is_prepared: - raise Exception('You can only call "prepare" once for a given encoder.') - - if self.original_type in (dtype.categorical, dtype.binary): - self._normalizer = CatNormalizer(encoder_class='ordinal') - else: - self._normalizer = MinMaxNormalizer() - - if isinstance(priming_data, pd.Series): - priming_data = priming_data.values - - priming_data = [self._pad_and_strip(list(x)) for x in priming_data] - - self._normalizer.prepare(priming_data) - self.output_size *= self._normalizer.output_size - self.is_prepared = True - - def encode(self, column_data: Union[list, np.ndarray, torch.Tensor]) -> torch.Tensor: - if not self.is_prepared: - raise Exception('You need to call "prepare" before calling "encode" or "decode".') - - if isinstance(column_data, pd.Series): - column_data = column_data.values - - for i in range(len(column_data)): - if is_none(column_data[i]): - column_data[i] = [0] * self.output_size - column_data = [self._pad_and_strip(list(x)) for x in column_data] - - data = torch.cat([self._normalizer.encode(column_data)], dim=-1) - data[torch.isnan(data)] = 0.0 - data[torch.isinf(data)] = 0.0 - - return data - - def decode(self, data) -> torch.tensor: - decoded = data.tolist() - return decoded
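A minimal usage sketch based on the signatures shown above; the sample arrays are made up, and stop_after is simply a time budget in seconds.

    import pandas as pd
    from lightwood.encoder.array.array import ArrayEncoder

    train = pd.Series([[1.0, 2.0, 3.0], [2.0, 3.0, 4.0]])  # toy priming data: fixed-length numerical arrays
    dev = pd.Series([[3.0, 4.0, 5.0]])

    enc = ArrayEncoder(stop_after=60, window=2)  # window + 1 == expected array length
    enc.prepare(train, dev)
    encoded = enc.encode(pd.Series([[4.0, 5.0, 6.0]]))
    print(encoded.shape)  # (1, enc.output_size)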
- - - - - - - - - - - \ No newline at end of file diff --git a/docs/_modules/lightwood/encoder/base.html b/docs/_modules/lightwood/encoder/base.html deleted file mode 100644 index d6f37c120..000000000 --- a/docs/_modules/lightwood/encoder/base.html +++ /dev/null @@ -1,275 +0,0 @@ - - - - - - - - - - lightwood.encoder.base — lightwood 1.6.1 documentation - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - -
Source code for lightwood.encoder.base

-from typing import List
-import torch
-
-
-
[docs]class BaseEncoder: - """ - Base class for all encoders. - - An encoder should return encoded representations of any columnar data. - The procedure for this is defined inside the `encode()` method. - - If this encoder is expected to handle an output column, then it also needs to implement the respective `decode()` method that handles the inverse transformation from encoded representations to the final prediction in the original column space. - - For encoders that learn representations (as opposed to rule-based), the `prepare()` method will handle all learning logic. - - The `to()` method is used to move PyTorch-based encoders to and from a GPU. - - :param is_target: Whether the data to encode is the target, as per the problem definition. - :param is_timeseries_encoder: Whether encoder represents sequential/time-series data. Lightwood must provide specific treatment for this kind of encoder - :param is_trainable_encoder: Whether the encoder must return learned representations. Lightwood checks whether this flag is present in order to pass data to the feature representation via the ``prepare`` statement. - - Class Attributes: - - is_prepared: Internal flag to signal that the `prepare()` method has been successfully executed. - - is_nn_encoder: Whether the encoder is neural network-based. - - dependencies: list of additional columns that the encoder might need to encode. - - output_size: length of each encoding tensor for a single data point. - - """ # noqa - is_target: bool - is_prepared: bool - - is_timeseries_encoder: bool = False - is_trainable_encoder: bool = False - - def __init__(self, is_target=False) -> None: - self.is_target = is_target - self.is_prepared = False - self.dependencies = [] - self.output_size = None - - # Not all encoders need to be prepared - def prepare(self, priming_data) -> None: - self.is_prepared = True - - def encode(self, column_data) -> torch.Tensor: - raise NotImplementedError - - def decode(self, encoded_data) -> List[object]: - raise NotImplementedError - - # Should work for all torch-based encoders, but custom behavior may have to be implemented for weird models - def to(self, device, available_devices): - # Find all nn.Module type objects and convert them - # @TODO: Make this work recursively - for v in vars(self): - attr = getattr(self, v) - if isinstance(attr, torch.nn.Module): - attr.to(device) - return self
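To illustrate the contract described in the docstring above, here is a toy custom encoder; it is a sketch only, not something shipped with Lightwood.

    import torch
    from lightwood.encoder.base import BaseEncoder

    class LengthEncoder(BaseEncoder):
        """Toy encoder: represents each value by the length of its string form."""
        def __init__(self, is_target: bool = False):
            super().__init__(is_target)
            self.output_size = 1

        def prepare(self, priming_data) -> None:
            # Rule-based, so there is nothing to learn; just flip the flag.
            self.is_prepared = True

        def encode(self, column_data) -> torch.Tensor:
            return torch.Tensor([[float(len(str(x)))] for x in column_data])

        def decode(self, encoded_data):
            return [int(v[0]) for v in encoded_data.tolist()]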
- - - - - - - - - - - \ No newline at end of file diff --git a/docs/_modules/lightwood/encoder/categorical/autoencoder.html b/docs/_modules/lightwood/encoder/categorical/autoencoder.html deleted file mode 100644 index c8b60f587..000000000 --- a/docs/_modules/lightwood/encoder/categorical/autoencoder.html +++ /dev/null @@ -1,323 +0,0 @@ - - - - - - - - - - lightwood.encoder.categorical.autoencoder — lightwood 1.6.1 documentation - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - -
Source code for lightwood.encoder.categorical.autoencoder

-import random
-import numpy as np
-import torch
-from torch.utils.data import DataLoader
-from lightwood.mixer.helpers.ranger import Ranger
-from lightwood.encoder.categorical.onehot import OneHotEncoder
-from lightwood.encoder.categorical.gym import Gym
-from lightwood.encoder.base import BaseEncoder
-from lightwood.helpers.log import log
-from lightwood.mixer.helpers.default_net import DefaultNet
-import pandas as pd
-
-
-
[docs]class CategoricalAutoEncoder(BaseEncoder): - is_trainable_encoder: bool = True - - def __init__(self, stop_after: int = 3600, is_target: bool = False, max_encoded_length: int = 100): - super().__init__(is_target) - self.is_prepared = False - self.name = 'Categorical Autoencoder' - self.net = None - self.encoder = None - self.decoder = None - self.onehot_encoder = OneHotEncoder(is_target=self.is_target) - self.desired_error = 0.01 - self.stop_after = stop_after - # @TODO stop using instead of ONEHOT !!!@! - self.output_size = None - self.max_encoded_length = max_encoded_length - - def _encoder_targets(self, data): - oh_encoded_categories = self.onehot_encoder.encode(data) - target = oh_encoded_categories.cpu().numpy() - target_indexes = np.where(target > 0)[1] - targets_c = torch.LongTensor(target_indexes) - labels = targets_c.to(self.net.device) - return labels - - def prepare(self, train_priming_data, dev_priming_data): - priming_data = pd.concat([train_priming_data, dev_priming_data]) - random.seed(len(priming_data)) - - if self.is_prepared: - raise Exception('You can only call "prepare" once for a given encoder.') - - self.onehot_encoder.prepare(priming_data) - - input_len = self.onehot_encoder._lang.n_words - - if self.is_target: - log.warning('You are trying to use an autoencoder for the target value! \ - This is very likely a bad idea') - log.info('Preparing a categorical autoencoder, this might take a while') - - embeddings_layer_len = self.max_encoded_length - - self.net = DefaultNet(shape=[input_len, embeddings_layer_len, input_len]) - - criterion = torch.nn.CrossEntropyLoss() - optimizer = Ranger(self.net.parameters()) - - gym = Gym(model=self.net, optimizer=optimizer, scheduler=None, loss_criterion=criterion, - device=self.net.device, name=self.name, input_encoder=self.onehot_encoder.encode, - output_encoder=self._encoder_targets) - - batch_size = min(200, int(len(priming_data) / 50)) - - priming_data_str = [str(x) for x in priming_data] - train_data_loader = DataLoader( - list(zip(priming_data_str, priming_data_str)), - batch_size=batch_size, shuffle=True) - - test_data_loader = None - - best_model, _, _ = gym.fit(train_data_loader, - test_data_loader, - desired_error=self.desired_error, - max_time=self.stop_after, - eval_every_x_epochs=1, - max_unimproving_models=5) - - self.net = best_model.to(self.net.device) - - modules = [module for module in self.net.modules() if type( - module) != torch.nn.Sequential and type(module) != DefaultNet] - self.encoder = torch.nn.Sequential(*modules[0:2]).eval() - self.decoder = torch.nn.Sequential(*modules[2:3]).eval() - log.info('Categorical autoencoder ready') - - self.output_size = self.onehot_encoder._lang.n_words - self.output_size = self.max_encoded_length - self.is_prepared = True - - def encode(self, column_data): - oh_encoded_tensor = self.onehot_encoder.encode(column_data) - - with torch.no_grad(): - oh_encoded_tensor = oh_encoded_tensor.to(self.net.device) - embeddings = self.encoder(oh_encoded_tensor) - return embeddings.to('cpu') - - def decode(self, encoded_data): - with torch.no_grad(): - encoded_data = encoded_data.to(self.net.device) - oh_encoded_tensor = self.decoder(encoded_data) - oh_encoded_tensor = oh_encoded_tensor.to('cpu') - return self.onehot_encoder.decode(oh_encoded_tensor)
- - - - - - - - - - - \ No newline at end of file diff --git a/docs/_modules/lightwood/encoder/categorical/binary.html b/docs/_modules/lightwood/encoder/categorical/binary.html deleted file mode 100644 index 4f9a25d8d..000000000 --- a/docs/_modules/lightwood/encoder/categorical/binary.html +++ /dev/null @@ -1,287 +0,0 @@ - - - - - - - - - - lightwood.encoder.categorical.binary — lightwood 1.6.1 documentation - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - -
Source code for lightwood.encoder.categorical.binary

-import torch
-import numpy as np
-from scipy.special import softmax
-from lightwood.encoder.base import BaseEncoder
-
-
-# Exists mainly for datasets with many binary flags, where one-hot encoding (OHE) can be too slow to fit
-
[docs]class BinaryEncoder(BaseEncoder): - - def __init__(self, is_target=False, target_class_distribution=None): - super().__init__(is_target) - self.map = {} - self.rev_map = {} - self.output_size = 2 - if self.is_target: - self.target_class_distribution = target_class_distribution - self.index_weights = None - - def prepare(self, priming_data): - if self.is_prepared: - raise Exception('You can only call "prepare" once for a given encoder.') - - for x in priming_data: - x = str(x) - if x not in self.map: - self.map[x] = len(self.map) - self.rev_map[len(self.rev_map)] = x - - if len(self.map) == 2: - break - - if self.is_target: - self.index_weights = [None, None] - for word in self.map: - if self.target_class_distribution is not None: - self.index_weights[self.map[word]] = 1 / self.target_class_distribution[word] - else: - self.index_weights[self.map[word]] = 1 - - self.index_weights = torch.Tensor(self.index_weights) - - self.is_prepared = True - - def encode(self, column_data): - if not self.is_prepared: - raise Exception('You need to call "prepare" before calling "encode" or "decode".') - ret = [] - - for word in column_data: - index = self.map.get(word, None) - ret.append([0, 0]) - if index is not None: - ret[-1][index] = 1 - - return torch.Tensor(ret) - - def decode(self, encoded_data, return_raw=False): - encoded_data_list = encoded_data.tolist() - ret = [] - probs = [] - - for vector in encoded_data_list: - ret.append(self.rev_map[np.argmax(vector)]) - - if return_raw: - probs.append(softmax(vector).tolist()) - - if return_raw: - return ret, probs, self.rev_map - else: - return ret
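A quick usage sketch for the class above; the category values are illustrative.

    from lightwood.encoder.categorical.binary import BinaryEncoder

    enc = BinaryEncoder()
    enc.prepare(['yes', 'no', 'yes', 'no'])
    vectors = enc.encode(['no', 'yes', 'maybe'])  # an unseen value maps to the all-zero vector
    print(vectors)                                # tensor of shape (3, 2)
    print(enc.decode(vectors))                    # ['no', 'yes', 'yes']; all-zeros decodes to the first seen category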
- - - - - - - - - - - \ No newline at end of file diff --git a/docs/_modules/lightwood/encoder/categorical/multihot.html b/docs/_modules/lightwood/encoder/categorical/multihot.html deleted file mode 100644 index 1ed7045a3..000000000 --- a/docs/_modules/lightwood/encoder/categorical/multihot.html +++ /dev/null @@ -1,254 +0,0 @@ - - - - - - - - - - lightwood.encoder.categorical.multihot — lightwood 1.6.1 documentation - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - -
Source code for lightwood.encoder.categorical.multihot

-import torch
-import numpy as np
-from lightwood.encoder import BaseEncoder
-from sklearn.preprocessing import MultiLabelBinarizer
-
-
-
[docs]class MultiHotEncoder(BaseEncoder): - def __init__(self, is_target: bool = False): - super().__init__(is_target) - self._binarizer = MultiLabelBinarizer() - self._seen = set() - self.output_size = None - - @staticmethod - def _clean_col_data(column_data): - column_data = [(arr if arr is not None else []) for arr in column_data] - column_data = [[str(x) for x in arr] for arr in column_data] - return column_data - - def prepare(self, priming_data, max_dimensions=100): - priming_data = self._clean_col_data(priming_data) - self._binarizer.fit(priming_data + [('None')]) - for arr in priming_data: - for x in arr: - self._seen.add(x) - self.is_prepared = True - self.output_size = len(self.encode(priming_data[0:1])[0]) - - def encode(self, column_data): - column_data = self._clean_col_data(column_data) - data_array = self._binarizer.transform(column_data) - return torch.Tensor(data_array) - - def decode(self, vectors): - # It these are logits output by the neural network, we need to treshold them to binary vectors - vectors = np.where(vectors > 0, 1, 0) - words_tuples = self._binarizer.inverse_transform(vectors) - return [list(w) for w in words_tuples]
- - - - - - - - - - - \ No newline at end of file diff --git a/docs/_modules/lightwood/encoder/categorical/onehot.html b/docs/_modules/lightwood/encoder/categorical/onehot.html deleted file mode 100644 index 3456469ca..000000000 --- a/docs/_modules/lightwood/encoder/categorical/onehot.html +++ /dev/null @@ -1,347 +0,0 @@ - - - - - - - - - - lightwood.encoder.categorical.onehot — lightwood 1.6.1 documentation - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - -
Source code for lightwood.encoder.categorical.onehot

-import torch
-import numpy as np
-from scipy.special import softmax
-from lightwood.encoder.text.helpers.rnn_helpers import Lang
-from lightwood.helpers.log import log
-from lightwood.encoder.base import BaseEncoder
-
-UNCOMMON_WORD = '__mdb_unknown_cat'
-UNCOMMON_TOKEN = 0
-
-
-
[docs]class OneHotEncoder(BaseEncoder): - - def __init__(self, is_target=False, target_class_distribution=None, handle_unknown='unknown_token'): - super().__init__(is_target) - self._lang = None - self.rev_map = {} - - if handle_unknown not in {"unknown_token", "return_zeros"}: - raise ValueError(f"handle_unknown should be either 'unknown_token' or 'return_zeros', got {handle_unknown}") - else: - self.handle_unknown = handle_unknown - - if self.is_target: - self.target_class_distribution = target_class_distribution - self.index_weights = None - - def prepare(self, priming_data, max_dimensions=20000): - if self.is_prepared: - raise Exception('You can only call "prepare" once for a given encoder.') - - self._lang = Lang('default') - if self.handle_unknown == "return_zeros": - priming_data = [x for x in priming_data if x is not None] - self._lang.index2word = {} - self._lang.word2index = {} - self._lang.n_words = 0 - else: # self.handle_unknown == "unknown_token" - priming_data = [x if x is not None else UNCOMMON_WORD for x in priming_data] - self._lang.index2word = {UNCOMMON_TOKEN: UNCOMMON_WORD} - self._lang.word2index = {UNCOMMON_WORD: UNCOMMON_TOKEN} - self._lang.word2count[UNCOMMON_WORD] = 0 - self._lang.n_words = 1 - - for category in priming_data: - if category is not None: - self._lang.addWord(str(category)) - - while self._lang.n_words > max_dimensions: - if self.handle_unknown == "return_zeros": - necessary_words = [] - else: # self.handle_unknown == "unknown_token" - necessary_words = [UNCOMMON_WORD] - least_occuring_words = self._lang.getLeastOccurring(n=len(necessary_words) + 1) - - word_to_remove = None - for word in least_occuring_words: - if word not in necessary_words: - word_to_remove = word - break - - self._lang.removeWord(word_to_remove) - - if self.is_target: - self.index_weights = [None] * self._lang.n_words - if self.target_class_distribution is not None: - self.index_weights[0] = np.mean(list(self.target_class_distribution.values())) - else: - self.index_weights[0] = 1 - for word in set(priming_data): - if self.target_class_distribution is not None: - self.index_weights[self._lang.word2index[str(word)]] = 1 / self.target_class_distribution[word] - else: - self.index_weights[self._lang.word2index[str(word)]] = 1 - self.index_weights = torch.Tensor(self.index_weights) - - self.output_size = self._lang.n_words - self.rev_map = self._lang.index2word - self.is_prepared = True - - def encode(self, column_data): - if not self.is_prepared: - raise Exception('You need to call "prepare" before calling "encode" or "decode".') - - ret = [] - v_len = self._lang.n_words - - for word in column_data: - encoded_word = [0] * v_len - if word is not None: - word = str(word) - if self.handle_unknown == "return_zeros": - if word in self._lang.word2index: - index = self._lang.word2index[word] - encoded_word[index] = 1 - else: - # Encoding an unknown value will result in a vector of zeros - log.warning('Trying to encode a value never seen before, returning vector of zeros') - else: # self.handle_unknown == "unknown_token" - index = self._lang.word2index[word] if word in self._lang.word2index else UNCOMMON_TOKEN - encoded_word[index] = 1 - - ret.append(encoded_word) - - return torch.Tensor(ret) - - def decode(self, encoded_data, return_raw=False): - encoded_data_list = encoded_data.tolist() - ret = [] - probs = [] - - for vector in encoded_data_list: - # Logits and onehots are not the same in definition - # But this explicitly operates on logits; it will take care of - # the one hot (so you 
can pass something in the softmax logit space) - # But will not affect something that is already OHE. - - all_zeros = not np.any(vector) - if self.handle_unknown == "return_zeros" and all_zeros: - ret.append(UNCOMMON_WORD) - else: # self.handle_unknown == "unknown_token" - ohe_index = np.argmax(vector) - ret.append(self._lang.index2word[ohe_index]) - - if return_raw: - probs.append(softmax(vector).tolist()) - - if return_raw: - return ret, probs, self.rev_map - else: - return ret
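A short sketch of the handle_unknown behaviour described in the code above; the category names are made up.

    from lightwood.encoder.categorical.onehot import OneHotEncoder

    enc = OneHotEncoder(handle_unknown='unknown_token')
    enc.prepare(['red', 'green', 'blue'])
    vecs = enc.encode(['green', 'purple'])  # 'purple' was never seen during prepare()
    print(vecs.shape)                        # (2, 4): three categories plus the reserved unknown token
    print(enc.decode(vecs))                  # ['green', '__mdb_unknown_cat']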
- - - - - - - - - - - \ No newline at end of file diff --git a/docs/_modules/lightwood/encoder/datetime/datetime.html b/docs/_modules/lightwood/encoder/datetime/datetime.html deleted file mode 100644 index 9260491d9..000000000 --- a/docs/_modules/lightwood/encoder/datetime/datetime.html +++ /dev/null @@ -1,306 +0,0 @@ - - - - - - - - - - lightwood.encoder.datetime.datetime — lightwood 1.6.1 documentation - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - -
Source code for lightwood.encoder.datetime.datetime

-import datetime
-import calendar
-from typing import Optional
-import torch
-from lightwood.encoder.base import BaseEncoder
-from lightwood.helpers.general import is_none
-
-
-
[docs]class DatetimeEncoder(BaseEncoder): - """ - This encoder produces an encoded representation for timestamps. - - The approach consists on decomposing the timestamp objects into its constituent units (e.g. day-of-week, month, year, etc), and describing each of those with a single value that represents the magnitude in a sensible cycle length. - """ # noqa - def __init__(self, is_target: bool = False): - super().__init__(is_target) - self.fields = ['year', 'month', 'day', 'weekday', 'hour', 'minute', 'second'] - self.constants = {'year': 3000.0, 'month': 12.0, 'weekday': 7.0, - 'hour': 24.0, 'minute': 60.0, 'second': 60.0} - self.output_size = 7 - - def prepare(self, priming_data): - if self.is_prepared: - raise Exception('You can only call "prepare" once for a given encoder.') - - self.is_prepared = True - -
[docs] def encode(self, data): - """ - :param data: # @TODO: receive a consistent data type here; currently either list of lists or pd.Series w/lists - :return: encoded data - """ - if not self.is_prepared: - raise Exception('You need to call "prepare" before calling "encode" or "decode".') - - ret = [self.encode_one(unix_timestamp) for unix_timestamp in data] - - return torch.Tensor(ret)
- -
[docs] def encode_one(self, unix_timestamp: Optional[float]): - """ - Encodes a list of unix_timestamps, or a list of tensors with unix_timestamps - :param data: list of unix_timestamps (unix_timestamp resolution is seconds) - :return: a list of vectors - """ - if is_none(unix_timestamp): - vector = [0] * len(self.fields) - else: - c = self.constants - date = datetime.datetime.fromtimestamp(unix_timestamp) - day_constant = calendar.monthrange(date.year, date.month)[1] - vector = [date.year / c['year'], date.month / c['month'], date.day / day_constant, - date.weekday() / c['weekday'], date.hour / c['hour'], - date.minute / c['minute'], date.second / c['second']] - return vector
- - def decode(self, encoded_data, return_as_datetime=False): - ret = [] - if len(encoded_data.shape) > 2 and encoded_data.shape[0] == 1: - encoded_data = encoded_data.squeeze(0) - - for vector in encoded_data.tolist(): - ret.append(self.decode_one(vector, return_as_datetime=return_as_datetime)) - - return ret - - def decode_one(self, vector, return_as_datetime=False): - if sum(vector) == 0: - decoded = None - - else: - c = self.constants - - year = max(0, round(vector[0] * c['year'])) - month = max(1, min(12, round(vector[1] * c['month']))) - day_constant = calendar.monthrange(year, month)[-1] - day = max(1, min(round(vector[2] * day_constant), day_constant)) - hour = max(0, min(23, round(vector[4] * c['hour']))) - minute = max(0, min(59, round(vector[5] * c['minute']))) - second = max(0, min(59, round(vector[6] * c['second']))) - - dt = datetime.datetime(year=year, month=month, day=day, hour=hour, - minute=minute, second=second) - - if return_as_datetime is True: - decoded = dt - else: - decoded = round(dt.timestamp()) - - return decoded
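A small sketch of the cyclical decomposition described in the class docstring above; the timestamp value is arbitrary.

    import datetime
    from lightwood.encoder.datetime.datetime import DatetimeEncoder

    enc = DatetimeEncoder()
    enc.prepare([])  # nothing to learn; this only flips the is_prepared flag

    ts = datetime.datetime(2021, 7, 16, 12, 30).timestamp()
    print(enc.encode_one(ts))  # [year/3000, month/12, day/days_in_month, weekday/7, hour/24, minute/60, second/60]
    print(enc.decode(enc.encode([ts])))  # round-trips back to the original unix timestamp (up to rounding)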
- - - - - - - - - - - \ No newline at end of file diff --git a/docs/_modules/lightwood/encoder/datetime/datetime_sin_normalizer.html b/docs/_modules/lightwood/encoder/datetime/datetime_sin_normalizer.html deleted file mode 100644 index 6b999d59a..000000000 --- a/docs/_modules/lightwood/encoder/datetime/datetime_sin_normalizer.html +++ /dev/null @@ -1,326 +0,0 @@ - - - - - - - - - - lightwood.encoder.datetime.datetime_sin_normalizer — lightwood 1.6.1 documentation - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - -
Source code for lightwood.encoder.datetime.datetime_sin_normalizer

-import datetime
-import calendar
-import numpy as np
-import pandas as pd  # @TODO: remove?
-import torch
-from lightwood.encoder.base import BaseEncoder
-from collections.abc import Iterable
-from lightwood.helpers.general import is_none
-
-
-
[docs]class DatetimeNormalizerEncoder(BaseEncoder): - def __init__(self, is_target: bool = False, sinusoidal: bool = False): - super().__init__(is_target) - self.sinusoidal = sinusoidal - self.fields = ['year', 'month', 'day', 'weekday', 'hour', 'minute', 'second'] - self.constants = {'year': 3000.0, 'month': 12.0, 'weekday': 7.0, - 'hour': 24.0, 'minute': 60.0, 'second': 60.0} - if self.sinusoidal: - self.output_size = 2 - else: - self.output_size = 7 - - def prepare(self, priming_data): - if self.is_prepared: - raise Exception('You can only call "prepare" once for a given encoder.') - - self.is_prepared = True - -
[docs] def encode(self, data): - """ - :param data: # @TODO: receive a consistent data type here; currently either list of lists or pd.Series w/lists - :return: encoded data - """ - if not self.is_prepared: - raise Exception('You need to call "prepare" before calling "encode" or "decode".') - - if isinstance(data, pd.Series): - data = data.values - if not isinstance(data[0], Iterable): - data = [data] - - ret = [self.encode_one(row) for row in data] - - return torch.Tensor(ret)
- -
[docs] def encode_one(self, data): - """ - Encodes a list of unix_timestamps, or a list of tensors with unix_timestamps - :param data: list of unix_timestamps (unix_timestamp resolution is seconds) - :return: a list of vectors - """ - ret = [] - for unix_timestamp in data: - if is_none(unix_timestamp): - if self.sinusoidal: - vector = [0, 1] * len(self.fields) - else: - vector = [0] * len(self.fields) - else: - c = self.constants - if isinstance(unix_timestamp, torch.Tensor): - unix_timestamp = unix_timestamp.item() - date = datetime.datetime.fromtimestamp(unix_timestamp) - day_constant = calendar.monthrange(date.year, date.month)[1] - vector = [date.year / c['year'], date.month / c['month'], date.day / day_constant, - date.weekday() / c['weekday'], date.hour / c['hour'], - date.minute / c['minute'], date.second / c['second']] - if self.sinusoidal: - vector = np.array([(np.sin(n), np.cos(n)) for n in vector]).flatten() - - ret.append(vector) - - return ret
- - def decode(self, encoded_data, return_as_datetime=False): - ret = [] - if len(encoded_data.shape) > 2 and encoded_data.shape[0] == 1: - encoded_data = encoded_data.squeeze(0) - - for vector in encoded_data.tolist(): - ret.append(self.decode_one(vector, return_as_datetime=return_as_datetime)) - - return ret - - def decode_one(self, vector, return_as_datetime=False): - if sum(vector) == 0: - decoded = None - - else: - if self.sinusoidal: - vector = list(map(lambda x: np.arcsin(x), vector))[::2] - c = self.constants - - year = max(0, round(vector[0] * c['year'])) - month = max(1, min(12, round(vector[1] * c['month']))) - day_constant = calendar.monthrange(year, month)[-1] - day = max(1, min(round(vector[2] * day_constant), day_constant)) - hour = max(0, min(23, round(vector[4] * c['hour']))) - minute = max(0, min(59, round(vector[5] * c['minute']))) - second = max(0, min(59, round(vector[6] * c['second']))) - - dt = datetime.datetime(year=year, month=month, day=day, hour=hour, - minute=minute, second=second) - - if return_as_datetime is True: - decoded = dt - else: - decoded = round(dt.timestamp()) - - return decoded
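The sinusoidal flag above replaces every normalised date component with a (sin, cos) pair. A tiny numpy sketch of that mapping; the component values are stand-ins, not real dates.

    import numpy as np

    components = np.linspace(0.0, 1.0, 7)  # stand-ins for the 7 normalised date components
    sinusoidal = np.array([(np.sin(n), np.cos(n)) for n in components]).flatten()
    print(sinusoidal.shape)  # (14,): two values per component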
- - - - - - - - - - - \ No newline at end of file diff --git a/docs/_modules/lightwood/encoder/image/img_2_vec.html b/docs/_modules/lightwood/encoder/image/img_2_vec.html deleted file mode 100644 index eb1f05c20..000000000 --- a/docs/_modules/lightwood/encoder/image/img_2_vec.html +++ /dev/null @@ -1,285 +0,0 @@ - - - - - - - - - - lightwood.encoder.image.img_2_vec — lightwood 1.6.1 documentation - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - -
Source code for lightwood.encoder.image.img_2_vec

-from typing import List
-import logging
-import torch
-import torchvision.transforms as transforms
-from PIL import Image
-import pandas as pd
-from lightwood.encoder.image.helpers.img_to_vec import Img2Vec
-from lightwood.encoder.base import BaseEncoder
-
-
-
[docs]class Img2VecEncoder(BaseEncoder): - is_trainable_encoder: bool = True - - def __init__(self, stop_after: int = 3600, is_target: bool = False): - super().__init__(is_target) - # # I think we should make this an enum, something like: speed, balance, accuracy - # self.aim = aim - self.is_prepared = False - - self._scaler = transforms.Resize((224, 224)) - self._normalize = transforms.Normalize(mean=[0.485, 0.456, 0.406], std=[0.229, 0.224, 0.225]) - self._to_tensor = transforms.ToTensor() - self._img_to_tensor = transforms.Compose([ - self._scaler, - self._to_tensor, - self._normalize - ]) - self.stop_after = stop_after - - pil_logger = logging.getLogger('PIL') - pil_logger.setLevel(logging.ERROR) - - def prepare(self, train_priming_data: pd.Series, dev_priming_data: pd.Series): - # @TODO: Add a bit of training here (maybe? depending on time aim) - if self.is_prepared: - raise Exception('You can only call "prepare" once for a given encoder.') - - self.model = Img2Vec() - self.output_size = self.model.output_size - self.is_prepared = True - - def to(self, device, available_devices): - self.model.to(device, available_devices) - return self - -
[docs] def encode(self, images: List[str]) -> torch.Tensor: - """ - Encode list of images - - :param images: list of images, each image is a path to a file or a url - :return: a torch.floatTensor - """ - if not self.is_prepared: - raise Exception('You need to call "prepare" before calling "encode" or "decode".') - - img_tensors = [self._img_to_tensor( - Image.open(img_path) - ) for img_path in images] - vec_arr = [] - - self.model.eval() - with torch.no_grad(): - for img_tensor in img_tensors: - vec = self.model(img_tensor.unsqueeze(0), batch=False) - vec_arr.append(vec) - return torch.stack(vec_arr).to('cpu')
- - def decode(self, encoded_values_tensor): - raise Exception('This encoder is not bi-directional')
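A hedged usage sketch for the class above; 'cat.jpg' and 'dog.jpg' are placeholder paths to local image files, and prepare only loads the pretrained Img2Vec backbone (the priming series themselves are not used).

    import pandas as pd
    from lightwood.encoder.image.img_2_vec import Img2VecEncoder

    enc = Img2VecEncoder(stop_after=60)
    enc.prepare(pd.Series(['cat.jpg', 'dog.jpg']), pd.Series([], dtype=object))
    embeddings = enc.encode(['cat.jpg'])  # placeholder path: any readable image file would do
    print(embeddings.shape)               # (1, enc.output_size)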
- - - - - - - - - - - \ No newline at end of file diff --git a/docs/_modules/lightwood/encoder/numeric/numeric.html b/docs/_modules/lightwood/encoder/numeric/numeric.html deleted file mode 100644 index ee564f7a2..000000000 --- a/docs/_modules/lightwood/encoder/numeric/numeric.html +++ /dev/null @@ -1,347 +0,0 @@ - - - - - - - - - - lightwood.encoder.numeric.numeric — lightwood 1.6.1 documentation - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - -
Source code for lightwood.encoder.numeric.numeric

-import math
-import torch
-import numpy as np
-from lightwood.encoder.base import BaseEncoder
-from lightwood.helpers.log import log
-from lightwood.helpers.general import is_none
-
-
-
[docs]class NumericEncoder(BaseEncoder): - - def __init__(self, data_type=None, is_target: bool = False, positive_domain: bool = False): - super().__init__(is_target) - self._type = data_type - self._abs_mean = None - self.positive_domain = positive_domain - self.decode_log = False - self.output_size = 4 if not self.is_target else 3 - - def prepare(self, priming_data): - if self.is_prepared: - raise Exception('You can only call "prepare" once for a given encoder.') - - value_type = 'int' - for number in priming_data: - try: - number = float(number) - except Exception: - continue - - if np.isnan(number): - err = 'Lightwood does not support working with NaN values !' - log.warning(err) - continue - - if int(number) != number: - value_type = 'float' - - self._type = value_type if self._type is None else self._type - non_null_priming_data = [float(str(x).replace(',', '.')) for x in priming_data if not is_none(x)] - self._abs_mean = np.mean(np.abs(non_null_priming_data)) - self.is_prepared = True - - def encode(self, data): - if not self.is_prepared: - raise Exception('You need to call "prepare" before calling "encode" or "decode".') - - ret = [] - for real in data: - try: - real = float(real) - except Exception: - try: - real = float(real.replace(',', '.')) - except Exception: - real = None - if self.is_target: - vector = [0] * 3 - if real is not None and self._abs_mean > 0: - vector[0] = 1 if real < 0 and not self.positive_domain else 0 - vector[1] = math.log(abs(real)) if abs(real) > 0 else -20 - vector[2] = real / self._abs_mean - else: - log.debug(f'Can\'t encode target value: {real}') - - else: - vector = [0] * 4 - try: - if is_none(real): - vector[0] = 0 - else: - vector[0] = 1 - vector[1] = math.log(abs(real)) if abs(real) > 0 else -20 - vector[2] = 1 if real < 0 and not self.positive_domain else 0 - vector[3] = real / self._abs_mean - except Exception as e: - vector = [0] * 4 - log.error(f'Can\'t encode input value: {real}, exception: {e}') - - ret.append(vector) - - return torch.Tensor(ret) - - def decode(self, encoded_values, decode_log=None) -> list: - if not self.is_prepared: - raise Exception('You need to call "prepare" before calling "encode" or "decode".') - - if decode_log is None: - decode_log = self.decode_log - - ret = [] - if isinstance(encoded_values, torch.Tensor): - encoded_values = encoded_values.tolist() - - for vector in encoded_values: - if self.is_target: - if np.isnan( - vector[0]) or vector[0] == float('inf') or np.isnan( - vector[1]) or vector[1] == float('inf') or np.isnan( - vector[2]) or vector[2] == float('inf'): - log.error(f'Got weird target value to decode: {vector}') - real_value = pow(10, 63) - else: - if decode_log: - sign = -1 if vector[0] > 0.5 else 1 - try: - real_value = math.exp(vector[1]) * sign - except OverflowError: - real_value = pow(10, 63) * sign - else: - real_value = vector[2] * self._abs_mean - - if self.positive_domain: - real_value = abs(real_value) - - if self._type == 'int': - real_value = int(real_value) - - else: - if vector[0] < 0.5: - ret.append(None) - continue - - real_value = vector[3] * self._abs_mean - - if self._type == 'int': - real_value = round(real_value) - - if isinstance(real_value, torch.Tensor): - real_value = real_value.item() - ret.append(real_value) - return ret
- - - - - - - - - - - \ No newline at end of file diff --git a/docs/_modules/lightwood/encoder/numeric/ts_array_numeric.html b/docs/_modules/lightwood/encoder/numeric/ts_array_numeric.html deleted file mode 100644 index 61267f070..000000000 --- a/docs/_modules/lightwood/encoder/numeric/ts_array_numeric.html +++ /dev/null @@ -1,280 +0,0 @@ - - - - - - - - - - lightwood.encoder.numeric.ts_array_numeric — lightwood 1.6.1 documentation - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - -
Source code for lightwood.encoder.numeric.ts_array_numeric

-import torch
-import torch.nn.functional as F
-from lightwood.encoder import BaseEncoder
-from lightwood.encoder.numeric import TsNumericEncoder
-
-
-
[docs]class TsArrayNumericEncoder(BaseEncoder): - """ - Variant of vanilla numerical encoder, supports dynamic mean re-scaling - """ - - def __init__(self, timesteps: int, is_target: bool = False, positive_domain: bool = False, grouped_by=None): - super(TsArrayNumericEncoder, self).__init__(is_target=is_target) - # time series normalization params - self.normalizers = None - self.group_combinations = None - self.dependencies = grouped_by - self.data_window = timesteps - self.positive_domain = positive_domain - self.sub_encoder = TsNumericEncoder(is_target=is_target, positive_domain=positive_domain, grouped_by=grouped_by) - self.output_size = self.data_window * self.sub_encoder.output_size - - def prepare(self, priming_data): - if self.is_prepared: - raise Exception('You can only call "prepare" once for a given encoder.') - - self.sub_encoder.prepare(priming_data) - self.is_prepared = True - -
[docs] def encode(self, data, dependency_data={}): - """ - :param dependency_data: dict with grouped_by column info, to retrieve the correct normalizer for each datum - :return: tensor with shape (batch, NxK) where N: self.data_window and K: sub-encoder # of output features - """ # noqa - if not self.is_prepared: - raise Exception('You need to call "prepare" before calling "encode" or "decode".') - if not dependency_data: - dependency_data = {'__default': [None] * len(data)} - - ret = [] - for data_point in data: - ret.append(self.sub_encoder.encode([data_point], dependency_data=dependency_data)) - - ret = torch.hstack(ret) - padding_size = self.output_size - ret.shape[-1] - - if padding_size > 0: - ret = F.pad(ret, (0, padding_size)) - - return ret
- - def decode(self, encoded_values, dependency_data=None, return_all=False): - if not self.is_prepared: - raise Exception('You need to call "prepare" before calling "encode" or "decode".') - - encoded_values = encoded_values.reshape(encoded_values.shape[0], - self.data_window, - self.sub_encoder.output_size) - - ret = [] - for encoded_timestep in torch.split(encoded_values, 1, dim=1): - ret.extend(self.sub_encoder.decode(encoded_timestep.squeeze(1), dependency_data=dependency_data)) - - return ret
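The shape bookkeeping in __init__ and encode above boils down to simple arithmetic; a small illustrative check with made-up numbers.

    timesteps = 5                          # self.data_window
    sub_output = 2                         # TsNumericEncoder output_size when encoding a target
    output_size = timesteps * sub_output   # as set in __init__

    encoded_width = 3 * sub_output         # e.g. only 3 historical values were available
    padding = output_size - encoded_width  # F.pad appends this many trailing zeros
    print(output_size, padding)            # 10 4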
- - - - - - - - - - - \ No newline at end of file diff --git a/docs/_modules/lightwood/encoder/numeric/ts_numeric.html b/docs/_modules/lightwood/encoder/numeric/ts_numeric.html deleted file mode 100644 index 9dc684e9c..000000000 --- a/docs/_modules/lightwood/encoder/numeric/ts_numeric.html +++ /dev/null @@ -1,341 +0,0 @@ - - - - - - - - - - lightwood.encoder.numeric.ts_numeric — lightwood 1.6.1 documentation - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - -
Source code for lightwood.encoder.numeric.ts_numeric

-import math
-import torch
-import numpy as np
-from lightwood.encoder.numeric import NumericEncoder
-from lightwood.helpers.log import log
-
-
-
[docs]class TsNumericEncoder(NumericEncoder): - """ - Variant of vanilla numerical encoder, supports dynamic mean re-scaling - """ - is_timeseries_encoder: bool = True - - def __init__(self, is_target: bool = False, positive_domain: bool = False, grouped_by=None): - super(TsNumericEncoder, self).__init__(is_target=is_target, positive_domain=positive_domain) - # time series normalization params - self.normalizers = None - self.group_combinations = None - self.dependencies = grouped_by - self.output_size = 2 if is_target else 3 - -
[docs] def encode(self, data, dependency_data={}): - """ - :param dependency_data: dict with grouped_by column info, to retrieve the correct normalizer for each datum - """ # noqa - if not self.is_prepared: - raise Exception('You need to call "prepare" before calling "encode" or "decode".') - if not dependency_data: - dependency_data = {'__default': [None] * len(data)} - - ret = [] - for real, group in zip(data, list(zip(*dependency_data.values()))): - try: - real = float(real) - except Exception: - try: - real = float(real.replace(',', '.')) - except Exception: - real = None - if self.is_target: - vector = [0] * 2 - if group is not None and self.normalizers is not None: - try: - mean = self.normalizers[frozenset(group)].abs_mean - except KeyError: - # novel group-by, we use default normalizer mean - mean = self.normalizers['__default'].abs_mean - else: - mean = self._abs_mean - - if real is not None: - vector[0] = 1 if real < 0 and not self.positive_domain else 0 - vector[1] = real / mean if mean != 0 else real - else: - raise Exception(f'Can\'t encode target value: {real}') - - else: - vector = [0] * 3 - try: - if real is not None: - vector[0] = 1 - vector[1] = 1 if real < 0 and not self.positive_domain else 0 - vector[2] = real / self._abs_mean - except Exception as e: - log.error(f'Can\'t encode input value: {real}, exception: {e}') - - ret.append(vector) - - return torch.Tensor(ret)
- - def decode(self, encoded_values, decode_log=None, dependency_data=None): - if not self.is_prepared: - raise Exception('You need to call "prepare" before calling "encode" or "decode".') - - if decode_log is None: - decode_log = self.decode_log - - ret = [] - if not dependency_data: - dependency_data = {'__default': [None] * len(encoded_values)} - if isinstance(encoded_values, torch.Tensor): - encoded_values = encoded_values.tolist() - - for vector, group in zip(encoded_values, list(zip(*dependency_data.values()))): - if self.is_target: - if np.isnan(vector[0]) or vector[0] == float('inf') or np.isnan(vector[1]) or vector[1] == float('inf'): - log.error(f'Got weird target value to decode: {vector}') - real_value = pow(10, 63) - else: - if decode_log: - sign = -1 if vector[0] > 0.5 else 1 - try: - real_value = math.exp(vector[1]) * sign - except OverflowError: - real_value = pow(10, 63) * sign - else: - if group is not None and self.normalizers is not None: - try: - mean = self.normalizers[frozenset(group)].abs_mean - except KeyError: - # decode new group with default normalizer - mean = self.normalizers['__default'].abs_mean - else: - mean = self._abs_mean - - real_value = vector[1] * mean if mean != 0 else vector[1] - - if self.positive_domain: - real_value = abs(real_value) - - if self._type == 'int': - real_value = int(round(real_value, 0)) - - else: - if vector[0] < 0.5: - ret.append(None) - continue - - real_value = vector[2] * self._abs_mean - - if self._type == 'int': - real_value = round(real_value) - - ret.append(real_value) - return ret
- - - - - - - - - - - \ No newline at end of file diff --git a/docs/_modules/lightwood/encoder/text/pretrained.html b/docs/_modules/lightwood/encoder/text/pretrained.html deleted file mode 100644 index ef9f9ef43..000000000 --- a/docs/_modules/lightwood/encoder/text/pretrained.html +++ /dev/null @@ -1,590 +0,0 @@ - - - - - - - - - - lightwood.encoder.text.pretrained — lightwood 1.6.1 documentation - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - -
Source code for lightwood.encoder.text.pretrained

-"""
-2021.07.16
-Adding the "embed_mode" flag.
-
-Embed-mode is meant for when text is one of many columns in the model.
-If the model is direct (text -> output), then it is worth just using
-the fine-tuned encoder as the "mixer", so to speak; in that case, turn embed-mode OFF.
-
-This means there are 3 possible modes:
-
-(1) Classification
-    -> Fine tuned, output of encoder is [CLS] embedding
-    -> Fine tuned, output of encoder is the class value
-(2) Regression
-    -> Untrained; output of encoder is [CLS] embedding
-
-Training with regression is WIP; quantile-binning seems like the best approach,
-but using MSE loss while fine-tuning did not yield decent results, particularly
-because the mixer already seems to address this.
-
-2021.03.18
-
-## Padding changes the answer slightly in the model.
-
-The following text encoder uses huggingface's
-Distilbert. Internal benchmarks suggest
-1 epoch of fine tuning is ideal [classification].
-Training ONLY occurs for classification. Regression problems
-are not trained; embeddings are generated directly.
-
-See: https://huggingface.co/transformers/training.html
-for further details.
-
-Currently the model supports only distilbert.
-
-When instantiating the DistilBertForSeq.Class object,
-num_labels indicates whether you use classification or regression.
-
-See: https://huggingface.co/transformers/model_doc/distilbert.html#distilbertforsequenceclassification
-under the 'labels' command
-
-For classification we use num_labels = num_classes + 1:
-
-the extra (last) label is reserved
-as the "unknown" label; this differs from the original
-distilbert model (prior to 2021.03).
-
-TODOs:
-+ Regression
-+ Batch encodes() tokenization step
-+ Look into auto-encoding lower dimensional representations
-of the output embedding
-+ Look into regression tuning (will require grad. clipping)
-+ Look into tuning to the encoded space of output.
-"""
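To make the embed-mode and label-reservation points above concrete, here is a hedged usage sketch of the encoder defined below. The sample texts and the one-hot target tensor are made up, and the encode() call assumes the method defined further down in this module returns one [CLS] embedding per input string when embed_mode=True.

    import pandas as pd
    import torch
    from lightwood.api import dtype
    from lightwood.encoder.text.pretrained import PretrainedLangEncoder

    train_text = pd.Series(['great product', 'terrible support', 'works fine'])
    dev_text = pd.Series(['not bad'])
    # One-hot targets for the train + dev rows: two real classes plus the reserved "unknown" slot (last column).
    target_ohe = torch.Tensor([[1, 0, 0], [0, 1, 0], [1, 0, 0], [1, 0, 0]])

    enc = PretrainedLangEncoder(stop_after=600, output_type=dtype.categorical, embed_mode=True)
    enc.prepare(train_text, dev_text, target_ohe)  # fine-tunes DistilBERT for one epoch on the target
    embeddings = enc.encode(list(train_text))
    print(embeddings.shape)  # (3, enc.output_size)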
-import time
-import torch
-from torch.utils.data import DataLoader
-import os
-import pandas as pd
-from lightwood.encoder.text.helpers.pretrained_helpers import TextEmbed
-from lightwood.helpers.device import get_devices
-from lightwood.encoder.base import BaseEncoder
-from lightwood.helpers.log import log
-from lightwood.helpers.torch import LightwoodAutocast
-from lightwood.api import dtype
-from transformers import (
-    DistilBertModel,
-    DistilBertForSequenceClassification,
-    DistilBertTokenizerFast,
-    AdamW,
-    get_linear_schedule_with_warmup,
-)
-from lightwood.helpers.general import is_none
-
-
-
[docs]class PretrainedLangEncoder(BaseEncoder): - is_trainable_encoder: bool = True - - """ - Pretrained language models. - Option to train on a target encoding of choice. - - Args: - is_target ::Bool; data column is the target of ML. - model_name ::str; name of pre-trained model - custom_tokenizer ::function; custom tokenizing function - batch_size ::int; size of batch - max_position_embeddings ::int; max sequence length of input text - custom_train ::Bool; If true, trains model on target procided - frozen ::Bool; If true, freezes transformer layers during training. - epochs ::int; number of epochs to train model with - embed_mode ::Bool; If true, assumes the output of the encode() step is the CLS embedding. - """ - - def __init__( - self, - stop_after: int, - is_target=False, - model_name="distilbert", - custom_tokenizer=None, - batch_size=10, - max_position_embeddings=None, - frozen=False, - epochs=1, - output_type=None, - embed_mode=True, - ): - super().__init__(is_target) - - self.output_type = output_type - self.name = model_name + " text encoder" - log.info(self.name) - - self._max_len = max_position_embeddings - self._frozen = frozen - self._batch_size = batch_size - self._epochs = epochs - - # Model setup - self._tokenizer = custom_tokenizer - self._model = None - self.model_type = None - - # TODO: Other LMs; Distilbert is a good balance of speed/performance - self._classifier_model_class = DistilBertForSequenceClassification - self._embeddings_model_class = DistilBertModel - self._tokenizer_class = DistilBertTokenizerFast - self._pretrained_model_name = "distilbert-base-uncased" - - self.device, _ = get_devices() - self.stop_after = stop_after - - self.embed_mode = embed_mode - self.uses_target = True - self.output_size = None - - # DEBUGGING!!! - if self.embed_mode: - log.info("Embedding mode on. [CLS] embedding dim output of encode()") - else: - log.info("Embedding mode off. Logits are output of encode()") - -
[docs] def prepare(self, train_priming_data: pd.Series, dev_priming_data: pd.Series, encoded_target_values: torch.Tensor): - """ - Prepare the encoder by training on the target. - - Training data must be a dict with "targets" avail. - Automatically assumes this. - """ - os.environ['TOKENIZERS_PARALLELISM'] = 'true' - priming_data = pd.concat([train_priming_data, dev_priming_data]) - priming_data = priming_data.values - if self.is_prepared: - raise Exception("Encoder is already prepared.") - - # TODO: Make tokenizer custom with partial function; feed custom->model - if self._tokenizer is None: - self._tokenizer = self._tokenizer_class.from_pretrained(self._pretrained_model_name) - - # Replaces empty strings with '' - priming_data = [x if x is not None else "" for x in priming_data] - - # Checks training data details - # TODO: Regression flag; currently training supported for categorical only - - if (self.output_type in (dtype.categorical, dtype.binary)): - log.info("Training model.") - - # Prepare priming data into tokenized form + attention masks - text = self._tokenizer(priming_data, truncation=True, padding=True) - - log.info("\tOutput trained is categorical") - - labels = encoded_target_values.argmax(dim=1) - - # Construct the model - self._model = self._classifier_model_class.from_pretrained( - self._pretrained_model_name, - num_labels=len(encoded_target_values[0]), - ).to(self.device) - - # Construct the dataset for training - xinp = TextEmbed(text, labels) - dataset = DataLoader(xinp, batch_size=self._batch_size, shuffle=True) - - # If max length not set, adjust - if self._max_len is None: - self._max_len = self._model.config.max_position_embeddings - - if self._frozen: - log.info("\tFrozen Model + Training Classifier Layers") - """ - Freeze the base transformer model and train - a linear layer on top - """ - # Freeze all the transformer parameters - for param in self._model.base_model.parameters(): - param.requires_grad = False - - optimizer_grouped_parameters = self._model.parameters() - - else: - log.info("\tFine-tuning model") - """ - Fine-tuning parameters with weight decay - """ - no_decay = [ - "bias", - "LayerNorm.weight", - ] # decay on all terms EXCLUDING bias/layernorms - optimizer_grouped_parameters = [ - { - "params": [ - p - for n, p in self._model.named_parameters() - if not any(nd in n for nd in no_decay) - ], - "weight_decay": 0.01, - }, - { - "params": [ - p - for n, p in self._model.named_parameters() - if any(nd in n for nd in no_decay) - ], - "weight_decay": 0.0, - }, - ] - - optimizer = AdamW(optimizer_grouped_parameters, lr=1e-5) - scheduler = get_linear_schedule_with_warmup( - optimizer, - num_warmup_steps=0, # default value for GLUE - num_training_steps=len(dataset) * self._epochs, - ) - - # Train model; declare optimizer earlier if desired. - self._tune_model( - dataset, optim=optimizer, scheduler=scheduler, n_epochs=self._epochs - ) - - else: - log.info("Target is not classification; Embeddings Generator only") - - self.model_type = "embeddings_generator" - self._model = self._embeddings_model_class.from_pretrained( - self._pretrained_model_name - ).to(self.device) - - # TODO: Not a great flag - # Currently, if the task is not classification, you must have - # an embedding generator only. - if self.embed_mode is False: - log.info("Embedding mode must be ON for non-classification targets.") - self.embed_mode = True - - self.is_prepared = True - encoded = self.encode(priming_data[0:1]) - self.output_size = len(encoded[0])
- - def _tune_model(self, dataset, optim, scheduler, n_epochs=1): - """ - Given a model, train for n_epochs. - Specifically intended for tuning; it does NOT use loss/ - stopping criterion. - - model - torch.nn model; - dataset - torch.DataLoader; dataset to train - device - torch.device; cuda/cpu - log - lightwood.logger.log; log.info output - optim - transformers.optimization.AdamW; optimizer - scheduler - scheduling params - n_epochs - number of epochs to train - - """ - self._model.train() - - if optim is None: - log.info("No opt. provided, setting all params with AdamW.") - optim = AdamW(self._model.parameters(), lr=5e-5) - else: - log.info("Optimizer provided") - - if scheduler is None: - log.info("No scheduler provided.") - else: - log.info("Scheduler provided.") - - started = time.time() - for epoch in range(n_epochs): - total_loss = 0 - if time.time() - started > self.stop_after: - break - - for batch in dataset: - optim.zero_grad() - - with LightwoodAutocast(): - inpids = batch["input_ids"].to(self.device) - attn = batch["attention_mask"].to(self.device) - labels = batch["labels"].to(self.device) - outputs = self._model(inpids, attention_mask=attn, labels=labels) - loss = outputs[0] - - total_loss += loss.item() - - loss.backward() - optim.step() - if scheduler is not None: - scheduler.step() - - self._train_callback(epoch, total_loss / len(dataset)) - - def _train_callback(self, epoch, loss): - log.info(f"{self.name} at epoch {epoch+1} and loss {loss}!") - -
[docs] def encode(self, column_data): - """ - TODO: Maybe batch the text up; may take too long - Given column data, encode the dataset. - - Currently, returns the embedding of the pre-classifier layer. - - Args: - column_data:: [list[str]] list of text data in str form - - Returns: - encoded_representation:: [torch.Tensor] N_sentences x Nembed_dim - """ - if self.is_prepared is False: - raise Exception("You need to first prepare the encoder.") - - # Set model to testing/eval mode. - self._model.eval() - - encoded_representation = [] - - with torch.no_grad(): - # Set the weights; this is GPT-2 - for text in column_data: - - # Omit NaNs - if is_none(text): - text = "" - - # Tokenize the text with the built-in tokenizer. - inp = self._tokenizer.encode( - text, truncation=True, return_tensors="pt" - ).to(self.device) - - if self.embed_mode: # Embedding mode ON; return [CLS] - output = self._model.base_model(inp).last_hidden_state[:, 0] - - # If the model has a pre-classifier layer, use this embedding. - if hasattr(self._model, "pre_classifier"): - output = self._model.pre_classifier(output) - - else: # Embedding mode off; return classes - output = self._model(inp).logits - - encoded_representation.append(output.detach()) - - return torch.stack(encoded_representation).squeeze(1).to('cpu')
- - def decode(self, encoded_values_tensor, max_length=100): - raise Exception("Decoder not implemented.") - - def to(self, device, available_devices): - for v in vars(self): - attr = getattr(self, v) - if isinstance(attr, torch.nn.Module): - attr.to(device) - return self
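A rough usage sketch of this encoder (the toy strings, targets, import path and 60-second budget are all assumptions for illustration; normally Lightwood constructs and primes encoders internally): prime it with text plus one-hot target encodings, then encode new strings into embeddings.

```python
import pandas as pd
import torch
from lightwood.api import dtype
from lightwood.encoder.text.pretrained import PretrainedLangEncoder

# Toy priming data, invented for illustration only.
train_texts = pd.Series(["great product", "awful support", "works as expected"])
dev_texts = pd.Series(["not bad at all"])

# One-hot encoded targets, one row per priming row (train rows first, then dev).
targets = torch.tensor([[1., 0.],
                        [0., 1.],
                        [1., 0.],
                        [0., 1.]])

encoder = PretrainedLangEncoder(stop_after=60, output_type=dtype.categorical)
encoder.prepare(train_texts, dev_texts, encoded_target_values=targets)
embeddings = encoder.encode(["brand new review"])  # one [CLS]-based embedding per string
```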
\ No newline at end of file
diff --git a/docs/_modules/lightwood/encoder/text/short.html b/docs/_modules/lightwood/encoder/text/short.html
deleted file mode 100644
index 2e2c3ceb9..000000000
--- a/docs/_modules/lightwood/encoder/text/short.html
+++ /dev/null
@@ -1,331 +0,0 @@
-lightwood.encoder.text.short — lightwood 1.6.1 documentation

Source code for lightwood.encoder.text.short

-from typing import List
-import torch
-from lightwood.encoder import BaseEncoder
-from lightwood.encoder.categorical import CategoricalAutoEncoder
-from lightwood.helpers.text import tokenize_text
-from lightwood.helpers.torch import concat_vectors_and_pad, average_vectors
-import pandas as pd
-
-
-
[docs]class ShortTextEncoder(BaseEncoder): - def __init__(self, is_target=False, mode=None): - """ - :param is_target: - :param mode: - None or "concat" or "mean". - When None, it will be set automatically based on is_target: - (is_target) -> 'concat' - (not is_target) -> 'mean' - """ - super().__init__(is_target) - - if mode is None: - if is_target: - self._mode = 'concat' - else: - self._mode = 'mean' - else: - if mode not in ['concat', 'mean']: - self._unexpected_mode() - - if is_target and mode != 'concat': - raise ValueError('mode must be "concat" when is_target=True') - - self._mode = mode - - # Defined in self.prepare() - self._combine_fn = None - self.max_words_per_sent = None - self.cae = CategoricalAutoEncoder(is_target=is_target, max_encoded_length=100) - self.is_prepared = False - - def _unexpected_mode(self): - raise ValueError('unexpected combine value (must be "mean" or "concat")') - - # defining both of these as normal functions because pickle can't deal with lambdas - def _combine_concat(self, vecs): - return concat_vectors_and_pad(vecs, self.max_words_per_sent) - - def _combine_mean(self, vecs): - return average_vectors(vecs) - - def prepare(self, priming_data): - no_null_sentences = (x if x is not None else '' for x in priming_data) - unique_tokens = set() - max_words_per_sent = 0 - for sent in no_null_sentences: - tokens = tokenize_text(sent) - max_words_per_sent = max(max_words_per_sent, len(tokens)) - for tok in tokens: - unique_tokens.add(tok) - - self.cae.prepare(pd.Series(list(unique_tokens)), pd.Series([])) - - if self._mode == 'concat': - self.max_words_per_sent = max_words_per_sent - self._combine_fn = self._combine_concat - elif self._mode == 'mean': - self._combine_fn = self._combine_mean - else: - self._unexpected_mode() - - self.is_prepared = True - encoded = self.encode([priming_data[0]]) - self.output_size = len(encoded[0]) - - def encode(self, column_data: List[str]) -> torch.Tensor: - no_null_sentences = (x if x is not None else '' for x in column_data) - output = [] - for sent in no_null_sentences: - tokens = tokenize_text(sent) - encoded_words = self.cae.encode(tokens) - encoded_sent = self._combine_fn(encoded_words) - output.append(torch.Tensor(encoded_sent)) - output = torch.stack(output) - return output - - def decode(self, vectors): - if self._mode == 'concat': - - vec_size = self.cae.max_encoded_length - - output = [] - for vec in vectors: - - viewed_vec = vec.view(-1, vec_size) - - # Find index of first padding vector - for index, v in enumerate(viewed_vec): - if v.abs().sum() == 0: - break - else: - index = viewed_vec.size(0) - - out = self.cae.decode( - viewed_vec[:index] - ) - - output.append(out) - - return output - - elif self._mode == 'mean': - raise ValueError('decode is only defined for mode="concat"') - else: - self._unexpected_mode()
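A hedged sketch of the two combine modes (phrases invented): 'mean' averages the word vectors of a sentence into a fixed-size embedding, while 'concat' concatenates and pads them and is the only mode that can be decoded, hence required for targets.

```python
import pandas as pd
from lightwood.encoder.text.short import ShortTextEncoder

phrases = pd.Series(["red shirt", "blue jeans", "green hat"])

enc = ShortTextEncoder(is_target=False, mode='mean')  # 'concat' is the other option
enc.prepare(phrases)
vecs = enc.encode(["red hat", "blue shirt"])           # one row per input phrase
print(vecs.shape, enc.output_size)
```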
\ No newline at end of file
diff --git a/docs/_modules/lightwood/encoder/text/vocab.html b/docs/_modules/lightwood/encoder/text/vocab.html
deleted file mode 100644
index 8901f32be..000000000
--- a/docs/_modules/lightwood/encoder/text/vocab.html
+++ /dev/null
@@ -1,252 +0,0 @@
-lightwood.encoder.text.vocab — lightwood 1.6.1 documentation

Source code for lightwood.encoder.text.vocab

-import os
-import torch
-from transformers import DistilBertTokenizer
-from lightwood.encoder.base import BaseEncoder
-
-
-
[docs]class VocabularyEncoder(BaseEncoder): - def __init__(self, is_target: bool = False): - super().__init__(is_target) - self._tokenizer_class = DistilBertTokenizer - self._pretrained_model_name = 'distilbert-base-uncased' - self._max_len = None - self._tokenizer = None - self._pad_id = None - - def prepare(self, priming_data): - os.environ['TOKENIZERS_PARALLELISM'] = 'true' - self._max_len = max([len(x) for x in priming_data]) - self._tokenizer = self._tokenizer_class.from_pretrained(self._pretrained_model_name) - self._pad_id = self._tokenizer.convert_tokens_to_ids([self._tokenizer.pad_token])[0] - - def encode(self, column_data): - vec = [] - for text in column_data: - encoded = self._tokenizer.encode(text[:self._max_len], add_special_tokens=True) - encoded = torch.tensor(encoded + [self._pad_id] * (self._max_len - len(encoded))) - vec.append(encoded) - return torch.stack(vec) - - def decode(self, encoded_values_tensor): - vec = [] - for encoded in encoded_values_tensor: - decoded = self._tokenizer.decode(encoded) - decoded = decoded.split('[PAD]')[0].rstrip().lstrip().lstrip('[CLS] ').rstrip(' [SEP]') - vec.append(decoded) - return vec
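A small encode/decode round trip for this vocabulary encoder (strings invented): rows are padded to the length of the longest priming string and mapped to DistilBERT vocabulary ids.

```python
from lightwood.encoder.text.vocab import VocabularyEncoder

enc = VocabularyEncoder()
enc.prepare(["the quick brown fox", "a short text column"])
ids = enc.encode(["the quick brown fox"])  # tensor of padded token ids
print(enc.decode(ids))                     # ['the quick brown fox']
```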
\ No newline at end of file
diff --git a/docs/_modules/lightwood/encoder/time_series/rnn.html b/docs/_modules/lightwood/encoder/time_series/rnn.html
deleted file mode 100644
index ccd8765fb..000000000
--- a/docs/_modules/lightwood/encoder/time_series/rnn.html
+++ /dev/null
@@ -1,717 +0,0 @@
-lightwood.encoder.time_series.rnn — lightwood 1.6.1 documentation

Source code for lightwood.encoder.time_series.rnn

-import time
-from math import gcd
-from typing import List
-from copy import deepcopy
-
-import numpy as np
-import pandas as pd
-import torch
-import torch.nn as nn
-from torch import optim
-
-from lightwood.api import dtype
-from lightwood.helpers.log import log
-from lightwood.encoder.base import BaseEncoder
-from lightwood.helpers.device import get_devices
-from lightwood.helpers.torch import LightwoodAutocast
-from lightwood.encoder.datetime import DatetimeNormalizerEncoder
-from lightwood.encoder.time_series.helpers.rnn_helpers import EncoderRNNNumerical, DecoderRNNNumerical
-from lightwood.encoder.helpers import MinMaxNormalizer, CatNormalizer
-from lightwood.helpers.general import get_group_matches
-from lightwood.encoder.time_series.helpers.transformer_helpers import TransformerEncoder, get_chunk, len_to_mask
-
-
-
[docs]class TimeSeriesEncoder(BaseEncoder): - """ - Time series encoder. This module can learn features for any `order_by` temporal column, both with and without accompanying target data. - - The backbone of this encoder is either a recurrent neural network or a transformer; both structured in an encoder-decoder fashion. - """ # noqa - is_timeseries_encoder: bool = True - is_trainable_encoder: bool = True - - def __init__(self, stop_after: int, is_target=False, original_type: str = None, target: str = None, - grouped_by: List[str] = [], encoder_type='rnn'): - super().__init__(is_target) - self.device, _ = get_devices() - self.target = target - self.grouped_by = grouped_by - self._learning_rate = 0.01 - self.output_size = 128 - self._transformer_hidden_size = None - self._epochs = int(1e5) # default training epochs - self._stop_on_n_bad_epochs = 5 # stop training after N epochs where loss is worse than running avg - self._epochs_running_avg = 5 # amount of epochs for running average - self._pytorch_wrapper = torch.FloatTensor - self.is_prepared = False - self._is_setup = False - self._max_ts_length = 0 - self._sos = 0.0 # start of sequence for decoding - self._eos = 0.0 # end of input sequence -- padding value for batches - self._n_dims = 1 - self._normalizer = None - self.dep_norms = {} # dict of dict of normalizers for each dependency (can be grouped-by some column) - self._target_type = None - self._group_combinations = None - self.original_type = original_type - self.stop_after = stop_after - if encoder_type.lower() == 'rnn': - self.encoder_class = EncoderRNNNumerical - elif encoder_type.lower() == 'transformer': - self.encoder_class = TransformerEncoder - -
[docs] def setup_nn(self, ts_analysis, dependencies=None): - """This method must be executed after initializing, else types are unassigned""" - if self.original_type in (dtype.datetime, dtype.date): - self._normalizer = DatetimeNormalizerEncoder(sinusoidal=True) - self._n_dims *= len(self._normalizer.fields) * 2 # sinusoidal datetime components - elif self.original_type in (dtype.float, dtype.integer): - self._normalizer = MinMaxNormalizer() - - total_dims = self._n_dims - dec_hsize = self.output_size - - if dependencies: - for dep_name, dep in dependencies.items(): - self.dependencies.append(dep_name) - - if dep_name in self.grouped_by: - continue # we only use group column for indexing and selecting rows - - assert dep['original_type'] in (dtype.categorical, dtype.binary, - dtype.integer, dtype.float, dtype.tsarray) - - if f'__mdb_ts_previous_{self.target}' == dep_name: - self.dep_norms[dep_name] = ts_analysis['target_normalizers'] - self._group_combinations = ts_analysis['group_combinations'] - self._target_type = dep['original_type'] - - # if TS analysis yields no normalizers for this dependency, we create a generic one based on its dtype - else: - if dep['original_type'] in (dtype.categorical, dtype.binary): - self.dep_norms[dep_name]['__default'] = CatNormalizer() - else: - self.dep_norms[dep_name]['__default'] = MinMaxNormalizer() - - self.dep_norms[dep_name]['__default'].prepare(dep['data']) - self._group_combinations = {'__default': None} - - # add descriptor size to the total encoder output dimensionality - if dep['original_type'] in (dtype.categorical, dtype.binary): - total_dims += len(self.dep_norms[dep_name]['__default'].scaler.categories_[0]) - elif dep['original_type'] in (dtype.integer, dtype.float, dtype.tsarray): - total_dims += 1 - - if self.encoder_class == EncoderRNNNumerical: - self._enc_criterion = nn.MSELoss() - self._dec_criterion = self._enc_criterion - self._encoder = self.encoder_class(input_size=total_dims, - hidden_size=self.output_size).to(self.device) - elif self.encoder_class == TransformerEncoder: - self._enc_criterion = self._masked_criterion - self._dec_criterion = nn.MSELoss() - self._base_criterion = nn.MSELoss(reduction="none") - if self._transformer_hidden_size is None: - self._transformer_hidden_size = total_dims * 2 # arbitrary - - self._encoder = self.encoder_class(ninp=total_dims, - nhead=gcd(dec_hsize, total_dims), - nhid=self._transformer_hidden_size, - nlayers=1).to(self.device) - else: - raise Exception(f"Time series encoder class not supported: {self.encoder_class}") - - self._decoder = DecoderRNNNumerical(output_size=total_dims, hidden_size=dec_hsize).to(self.device) - self._parameters = list(self._encoder.parameters()) + list(self._decoder.parameters()) - self._optimizer = optim.AdamW(self._parameters, lr=self._learning_rate, weight_decay=1e-4) - self._n_dims = total_dims - self._is_setup = True
- - def to(self, device, available_devices): - if self._is_setup: - self.device = device - return super().to(device, available_devices) - return self - - def _prepare_raw_data(self, data): - """Convert to array and determine max length""" - out_data = [] - for e in data: - if not isinstance(e, torch.Tensor): - e = np.array(e, dtype=float) - e[np.isnan(e)] = 0.0 - t = torch.tensor(e, dtype=torch.float) - else: - t = e.float() - t[torch.isnan(t)] = 0.0 - out_data.append(t) - lengths = torch.tensor([len(e) for e in data], dtype=torch.float) - return out_data, lengths - - def _get_batch(self, source, start, end): - end = min(end, len(source)) - return source[start:end] - -
[docs] def prepare(self, train_priming_data: pd.Series, dev_priming_data: pd.Series, dependency_data={}, ts_analysis=None, - feedback_hoop_function=log.info, batch_size=256): - """ - :param priming_data: a list of (self._n_dims)-dimensional time series [[dim1_data], ...] - :param dependency_data: raw data from other columns - :param ts_analysis: dictionary with time analysis info (e.g. normalizers for each target group) - :param feedback_hoop_function: method to use if you want to get feedback on the training process - :param batch_size - """ - priming_data = pd.concat([train_priming_data, dev_priming_data]) - priming_data = list(priming_data.values) - - if self.is_prepared: - raise Exception('You can only call "prepare" once for a given encoder.') - else: - self.setup_nn(ts_analysis, dependency_data) - - started = time.time() - - # Convert to array and determine max length - priming_data, lengths_data = self._prepare_raw_data(priming_data) - self._max_ts_length = int(lengths_data.max()) - - if self._normalizer: - self._normalizer.prepare(priming_data) - priming_data = self._normalizer.encode(priming_data).to(self.device) - if len(priming_data.shape) < 3: - priming_data = priming_data.unsqueeze(-1) - else: - priming_data = torch.stack([d for d in priming_data]).unsqueeze(-1).to(self.device) - - # merge all normalized data into a training batch - normalized_tensors = [] - for dep_name, dep_data in dependency_data.items(): - if dep_name in self.grouped_by: - continue - if dep_data['original_type'] in (dtype.integer, dtype.float): - dep_data['group_info'] = {group: dependency_data[group]['data'] for group in self.grouped_by} - data = torch.zeros((len(priming_data), lengths_data.max().int().item(), 1)) - all_idxs = set(range(len(data))) - for group_name, normalizer in self.dep_norms[dep_name].items(): - if group_name != '__default': - idxs, subset = get_group_matches(dep_data, normalizer.combination) - normalized = normalizer.encode(subset).unsqueeze(-1) - data[idxs, :, :] = normalized - all_idxs -= set(idxs) - if len(all_idxs) > 0 and '__default' in self.dep_norms[dep_name].keys(): - default_norm = self.dep_norms[dep_name]['__default'] - subset = [dep_data['data'][idx] for idx in list(all_idxs)] - data[list(all_idxs), :, :] = torch.Tensor(default_norm.encode(subset)).unsqueeze(-1) - - else: - # categorical has only one normalizer at all times - normalizer = self.dep_norms[dep_name]['__default'] - data = normalizer.encode(dep_data['data'].values) - if len(data.shape) < 3: - data = data.unsqueeze(-1) # add feature dimension - data[torch.isnan(data)] = 0.0 - normalized_tensors.append(data) - - if normalized_tensors: - normalized_data = torch.cat(normalized_tensors, dim=-1).to(self.device) - priming_data = torch.cat([priming_data, normalized_data], dim=-1) - - self._encoder.train() - running_losses = np.full(self._epochs_running_avg, np.nan) - bad_epochs = 0 - - for epoch in range(self._epochs): - average_loss = 0 - - for batch_idx in range(0, len(priming_data), batch_size): - # setup loss and optimizer - self._optimizer.zero_grad() - loss = 0 - - # shape: (batch_size, timesteps, n_dims) - batch = self._get_batch(priming_data, batch_idx, min(batch_idx + batch_size, len(priming_data))) - - # encode and decode through time - with LightwoodAutocast(): - if self.encoder_class == TransformerEncoder: - # pack batch length info tensor - len_batch = self._get_batch(lengths_data, batch_idx, min( - batch_idx + batch_size, len(priming_data))) - batch = batch, len_batch - - next_tensor, hidden_state, 
dec_loss = self._encoder.bptt( - batch, self._enc_criterion, self.device) - loss += dec_loss - - else: - next_tensor, hidden_state, enc_loss = self._encoder.bptt( - batch, self._enc_criterion, self.device) - loss += enc_loss - - next_tensor, hidden_state, dec_loss = self._decoder.decode( - batch, next_tensor, self._dec_criterion, self.device, hidden_state=hidden_state) - loss += dec_loss - - loss.backward() - - self._optimizer.step() - average_loss += loss.item() - - average_loss = average_loss / len(priming_data) - batch_idx += batch_size - - if epoch > self._epochs_running_avg and average_loss > np.average(running_losses): - bad_epochs += 1 - - # update running loss - running_losses[:-1] = running_losses[1:] - running_losses[-1] = average_loss - - if feedback_hoop_function is not None: - feedback_hoop_function( - "time series encoder epoch [{epoch_n}/{total}] average_loss = {average_loss}".format( - epoch_n=epoch + 1, total=self._epochs, average_loss=average_loss)) - - if bad_epochs > self._stop_on_n_bad_epochs: - break - elif (time.time() - started) > self.stop_after: - break - - self.is_prepared = True
- - def _encode_one(self, data, previous=None, initial_hidden=None, return_next_value=False): - """ - This method encodes one single row of serial data - :param data: multidimensional time series as list of lists [[dim1_data], [dim2_data], ...] - (dim_data: string with format "x11, x12, ... x1n") - :param initial_hidden: if you want to encode from an initial hidden state other than 0s - :param return_next_value: if you want to return the next value in the time series too - - :return: either encoded_value or (encoded_value, next_value) - """ - self._encoder.eval() - with torch.no_grad(): - # Convert to array and determine max length - data, lengths_data = self._prepare_raw_data(data) - self._max_ts_length = int(lengths_data.max()) - - if self._normalizer: - data = self._normalizer.encode(data).to(self.device) - if len(data.shape) < 3: - data = data.unsqueeze(-1) - else: - data = torch.stack([d for d in data]).unsqueeze(-1).to(self.device) - - if previous is not None: - target_tensor = torch.stack(previous).to(self.device) - target_tensor[torch.isnan(target_tensor)] = 0.0 - if len(target_tensor.shape) < 3: - target_tensor = target_tensor.transpose(0, 1).unsqueeze(0) - data_tensor = torch.cat((data, target_tensor), dim=-1) - else: - data_tensor = data - - steps = data_tensor.shape[1] - - if self.encoder_class == EncoderRNNNumerical: - encoder_hidden = self._encoder.init_hidden(self.device) - encoder_hidden = encoder_hidden if initial_hidden is None else initial_hidden - - next_tensor = None - for tensor_i in range(steps): - next_tensor, encoder_hidden = self._encoder.forward(data_tensor[:, tensor_i, :].unsqueeze(dim=0), - encoder_hidden) - - else: - next_tensor = None - len_batch = self._get_batch(lengths_data, 0, len(data)) - batch_size, timesteps, _ = data_tensor.shape - - for start_chunk in range(0, timesteps, timesteps): - data, targets, lengths_chunk = get_chunk(data_tensor, len_batch, start_chunk, timesteps) - data = data.transpose(0, 1) - next_tensor, encoder_hidden = self._encoder.forward(data, lengths_chunk, self.device) - - if return_next_value: - return encoder_hidden, next_tensor - else: - return encoder_hidden - -
[docs] def encode(self, column_data, dependency_data=None, get_next_count=None): - """ - Encode a list of time series data - :param column_data: a list of (self._n_dims)-dimensional time series [[dim1_data], ...] to encode - :param get_next_count: default None, but you can pass a number X and it will return the X following predictions - on the series for each ts_data_point in column_data - :return: a list of encoded time series or if get_next_count !=0 two lists (encoded_values, projected_numbers) - """ - - if not self.is_prepared: - raise Exception('You need to call "prepare" before calling "encode" or "decode".') - - if isinstance(column_data, pd.Series): - data = deepcopy(column_data.values) # get a copy to avoid modifying the actual data frame - else: - data = column_data - - for i in range(len(data)): - if not isinstance(data[i][0], list): - data[i] = [data[i]] # add dimension for 1D timeseries - - # include autoregressive target data - ptd = [] - if dependency_data is not None: - for dep, dep_data in dependency_data.items(): - if dep in self.grouped_by: - continue - # normalize numerical target per group-by - if self._target_type in (dtype.integer, dtype.float, dtype.tsarray): - dep_info = { - 'group_info': {group: dependency_data[group] for group in self.grouped_by}, - 'data': dep_data - } - tensor = torch.zeros((len(dep_data), len(dep_data[0]), 1)).to(self.device) - all_idxs = set(range(len(dep_data))) - - for combination in [c for c in self._group_combinations if c != '__default']: - normalizer = self.dep_norms[dep].get(frozenset(combination), None) - if normalizer is None: - normalizer = self.dep_norms[dep]['__default'] - idxs, subset = get_group_matches(dep_info, normalizer.combination) - if idxs: - tensor[idxs, :, :] = torch.Tensor(normalizer.encode(subset)).unsqueeze(-1).to(self.device) - all_idxs -= set(idxs) - - # encode all remaining rows (not belonging to any grouped combination) with default normalizer - if all_idxs: - default_norm = self.dep_norms[dep]['__default'] - subset = [dep_data[idx] for idx in all_idxs] - tensor[list(all_idxs), :, :] = torch.Tensor( - default_norm.encode(subset)).unsqueeze(-1).to(self.device) - tensor[torch.isnan(tensor)] = 0.0 - - # normalize categorical target - else: - normalizer = self.dep_norms[dep]['__default'] - tensor = normalizer.encode(dep_data) - tensor[torch.isnan(tensor)] = 0.0 - - ptd.append(tensor) - - ret = [] - next = [] - - for i, val in enumerate(data): - if get_next_count is None: - if dependency_data is not None and len(dependency_data) > 0 and len(ptd) > 0: - encoded = self._encode_one(val, previous=[values[i] for values in ptd]) - else: - encoded = self._encode_one(val) - - else: - if get_next_count <= 0: - raise Exception('get_next_count must be greater than 0') - - hidden = None - vector = val - next_i = [] - - for j in range(get_next_count): - hidden, next_reading = self._encode_one(vector, initial_hidden=hidden, return_next_value=True) - vector = [next_reading] - if j == 0: - encoded = hidden - next_i.append(next_reading) - - next_value = next_i[0][0].cpu() - - if self._normalizer: - next_value = torch.Tensor(self._normalizer.decode(next_value)) - - next.append(next_value) - - ret.append(encoded[0][0].cpu()) - - if get_next_count is None: - return torch.stack(ret) - else: - return torch.stack(ret), torch.stack(next)
- - def _decode_one(self, hidden, steps): - """ - Decodes a single time series from its encoded representation. - :param hidden: time series embedded representation tensor, with size self.output_size - :param steps: as in decode(), defines how many values to output when reconstructing - :return: decoded time series list - """ - self._decoder.eval() - with torch.no_grad(): - ret = [] - next_tensor = torch.full((1, 1, self._n_dims), self._sos, dtype=torch.float32).to(self.device) - timesteps = steps if steps else self._max_ts_length - for _ in range(timesteps): - next_tensor, hidden = self._decoder.forward(next_tensor, hidden) - ret.append(next_tensor) - return torch.stack(ret) - -
[docs] def decode(self, encoded_data, steps=None): - """ - Decode a list of embedded multidimensional time series - :param encoded_data: a list of embeddings [ e1, e2, ...] to be decoded into time series - :param steps: fixed number of timesteps to reconstruct from each embedding. - If None, encoder will output the largest length encountered during training. - :return: a list of reconstructed time series - """ - if not self.is_prepared: - raise Exception('You need to call "prepare" before calling "encode" or "decode".') - - ret = [] - for _, val in enumerate(encoded_data): - hidden = torch.unsqueeze(torch.unsqueeze(val, dim=0), dim=0).to(self.device) - reconstruction = self._decode_one(hidden, steps).cpu().squeeze().T.numpy() - - if self._n_dims == 1: - reconstruction = reconstruction.reshape(1, -1) - - if self._normalizer: - reconstruction = self._normalizer.decode(reconstruction) - - ret.append(reconstruction) - - return torch.Tensor(ret)
- - def _masked_criterion(self, output, targets, lengths): - """ Computes the loss of the first `lengths` items in the chunk """ - # Put in (B, T) format and zero-out the unnecessary values - mask = len_to_mask(lengths, zeros=False).t() - - # Inflate to feature dimension - mask = mask.unsqueeze(-1).repeat(1, 1, output.shape[-1]) - output = output * mask - targets = targets * mask - - # compute the loss with respect to the appropriate lengths and average across the batch-size - # We compute for every output (x_i)_i=1^L and target (y_i)_i=1^L, loss = 1/L \sum (x_i - y_i)^2 - # And average across the mini-batch - losses = self._base_criterion(output, targets).sum(dim=2).sum(dim=0) - - # The TBPTT will compute a slightly different loss, but it is not problematic - loss = torch.dot((1.0 / lengths.float()), losses) / len(losses) - - return loss
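The masked loss above only counts the first `lengths` timesteps of each series. A standalone toy version of that computation (shapes and values invented) looks roughly like this:

```python
import torch

# (timesteps, batch, features), as in the encoder's chunked transformer batches
output = torch.randn(5, 2, 3)
target = torch.randn(5, 2, 3)
lengths = torch.tensor([5., 3.])  # valid timesteps per series

# Zero out padded positions, sum squared errors over time and features,
# normalise by each series' true length, then average over the batch.
mask = (torch.arange(5).unsqueeze(1) < lengths.unsqueeze(0)).float().unsqueeze(-1)
per_series = (((output - target) * mask) ** 2).sum(dim=(0, 2))
loss = ((1.0 / lengths) * per_series).mean()
```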
\ No newline at end of file
diff --git a/docs/_modules/lightwood/ensemble/base.html b/docs/_modules/lightwood/ensemble/base.html
deleted file mode 100644
index 167773144..000000000
--- a/docs/_modules/lightwood/ensemble/base.html
+++ /dev/null
@@ -1,253 +0,0 @@
-lightwood.ensemble.base — lightwood 1.6.1 documentation

Source code for lightwood.ensemble.base

-from typing import List
-
-import pandas as pd
-
-from lightwood.mixer.base import BaseMixer
-from lightwood.data.encoded_ds import EncodedDs
-from lightwood.api.types import PredictionArguments
-
-
-
-class BaseEnsemble:
-    """
-    Base class for all ensembles.
-
-    Ensembles wrap sets of Lightwood mixers, with the objective of generating better predictions based on the output of each mixer.
-
-    There are two important methods for any ensemble to work:
-    1. `__init__()` should prepare all mixers and internal ensemble logic.
-    2. `__call__()` applies any aggregation rules to generate final predictions based on the output of each mixer.
-
-    Class Attributes:
-    - mixers: List of mixers the ensemble will use.
-    - supports_proba: For classification tasks, whether the ensemble supports yielding per-class scores rather than only returning the predicted label.
-
-    """  # noqa
-    data: EncodedDs
-    mixers: List[BaseMixer]
-    best_index: int  # @TODO: maybe only applicable to BestOf
-    supports_proba: bool
-
-    def __init__(self, target, mixers: List[BaseMixer], data: EncodedDs) -> None:
-        self.data = data
-        self.mixers = mixers
-        self.best_index = 0
-        self.supports_proba = False
-
-    def __call__(self, ds: EncodedDs, args: PredictionArguments) -> pd.DataFrame:
-        raise NotImplementedError()
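A hedged sketch of that two-method contract (the subclass below is invented and deliberately trivial: it always defers to its first mixer):

```python
from typing import List

import pandas as pd

from lightwood.api.types import PredictionArguments
from lightwood.data.encoded_ds import EncodedDs
from lightwood.ensemble.base import BaseEnsemble
from lightwood.mixer.base import BaseMixer


class FirstMixerEnsemble(BaseEnsemble):
    """Toy ensemble: no aggregation, just the first mixer's predictions."""

    def __init__(self, target, mixers: List[BaseMixer], data: EncodedDs) -> None:
        super().__init__(target, mixers, data)

    def __call__(self, ds: EncodedDs, args: PredictionArguments) -> pd.DataFrame:
        return self.mixers[0](ds, args=args)
```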
\ No newline at end of file
diff --git a/docs/_modules/lightwood/ensemble/best_of.html b/docs/_modules/lightwood/ensemble/best_of.html
deleted file mode 100644
index 325c121a5..000000000
--- a/docs/_modules/lightwood/ensemble/best_of.html
+++ /dev/null
@@ -1,280 +0,0 @@
-lightwood.ensemble.best_of — lightwood 1.6.1 documentation

Source code for lightwood.ensemble.best_of

-from typing import List, Optional
-
-import numpy as np
-import pandas as pd
-
-from lightwood.helpers.log import log
-from lightwood.helpers.numeric import is_nan_numeric
-from lightwood.mixer.base import BaseMixer
-from lightwood.ensemble.base import BaseEnsemble
-from lightwood.api.types import PredictionArguments
-from lightwood.data.encoded_ds import EncodedDs
-from lightwood.helpers.general import evaluate_accuracy
-
-
-
[docs]class BestOf(BaseEnsemble): - """ - This ensemble acts as a mixer selector. - After evaluating accuracy for all internal mixers with the validation data, it sets the best mixer as the underlying model. - """ # noqa - indexes_by_accuracy: List[float] - - def __init__(self, target, mixers: List[BaseMixer], data: EncodedDs, accuracy_functions, - args: PredictionArguments, ts_analysis: Optional[dict] = None) -> None: - super().__init__(target, mixers, data) - - score_list = [] - for _, mixer in enumerate(mixers): - score_dict = evaluate_accuracy( - data.data_frame, - mixer(data, args)['prediction'], - target, - accuracy_functions, - ts_analysis=ts_analysis - ) - avg_score = np.mean(list(score_dict.values())) - log.info(f'Mixer: {type(mixer).__name__} got accuracy: {avg_score}') - - if is_nan_numeric(avg_score): - avg_score = -pow(2, 63) - log.warning(f'Change the accuracy of mixer {type(mixer).__name__} to valid value: {avg_score}') - - score_list.append(avg_score) - - self.indexes_by_accuracy = list(reversed(np.array(score_list).argsort())) - self.supports_proba = self.mixers[self.indexes_by_accuracy[0]].supports_proba - log.info(f'Picked best mixer: {type(self.mixers[self.indexes_by_accuracy[0]]).__name__}') - - def __call__(self, ds: EncodedDs, args: PredictionArguments) -> pd.DataFrame: - if args.all_mixers: - predictions = {} - for mixer in self.mixers: - predictions[f'__mdb_mixer_{type(mixer).__name__}'] = mixer(ds, args=args)['prediction'] - return pd.DataFrame(predictions) - else: - for mixer_index in self.indexes_by_accuracy: - mixer = self.mixers[mixer_index] - try: - return mixer(ds, args=args) - except Exception as e: - if mixer.stable: - raise(e) - else: - log.warning(f'Unstable mixer {type(mixer).__name__} failed with exception: {e}.\ - Trying next best')
\ No newline at end of file
diff --git a/docs/_modules/lightwood/ensemble/mean_ensemble.html b/docs/_modules/lightwood/ensemble/mean_ensemble.html
deleted file mode 100644
index 02194ed9c..000000000
--- a/docs/_modules/lightwood/ensemble/mean_ensemble.html
+++ /dev/null
@@ -1,241 +0,0 @@
-lightwood.ensemble.mean_ensemble — lightwood 1.6.1 documentation

Source code for lightwood.ensemble.mean_ensemble

-from typing import List
-
-import pandas as pd
-
-from lightwood.mixer.base import BaseMixer
-from lightwood.ensemble.base import BaseEnsemble
-from lightwood.api.types import PredictionArguments
-from lightwood.data.encoded_ds import EncodedDs
-from lightwood import dtype
-
-
-
-class MeanEnsemble(BaseEnsemble):
-    def __init__(self, target, mixers: List[BaseMixer], data: EncodedDs, dtype_dict: dict) -> None:
-        super().__init__(target, mixers, data)
-        if dtype_dict[target] not in (dtype.float, dtype.integer, dtype.quantity):
-            raise Exception(
-                f'This ensemble can only be used regression problems! Got target dtype {dtype_dict[target]} instead!')
-
-    def __call__(self, ds: EncodedDs, args: PredictionArguments) -> pd.DataFrame:
-        predictions_df = pd.DataFrame()
-        for mixer in self.mixers:
-            predictions_df[f'__mdb_mixer_{type(mixer).__name__}'] = mixer(ds, args=args)['prediction']
-
-        return pd.DataFrame(predictions_df.mean(axis='columns'), columns=['prediction'])
\ No newline at end of file
diff --git a/docs/_modules/lightwood/ensemble/mode_ensemble.html b/docs/_modules/lightwood/ensemble/mode_ensemble.html
deleted file mode 100644
index 710aee7ea..000000000
--- a/docs/_modules/lightwood/ensemble/mode_ensemble.html
+++ /dev/null
@@ -1,296 +0,0 @@
-lightwood.ensemble.mode_ensemble — lightwood 1.6.1 documentation

Source code for lightwood.ensemble.mode_ensemble

-from typing import List, Optional, Dict
-
-import pandas as pd
-import numpy as np
-
-from lightwood.mixer.base import BaseMixer
-from lightwood.ensemble.base import BaseEnsemble
-from lightwood.api.types import PredictionArguments
-from lightwood.data.encoded_ds import EncodedDs
-from lightwood import dtype
-from lightwood.helpers.general import evaluate_accuracy
-from lightwood.helpers.numeric import is_nan_numeric
-from lightwood.helpers.log import log
-
-
-
[docs]class ModeEnsemble(BaseEnsemble): - mixer_scores: Dict[str, float] - - def __init__(self, target, mixers: List[BaseMixer], data: EncodedDs, dtype_dict: dict, - accuracy_functions, args: PredictionArguments, ts_analysis: Optional[dict] = None) -> None: - super().__init__(target, mixers, data) - self.mixer_scores = {} - - if dtype_dict[target] not in (dtype.binary, dtype.categorical, dtype.tags): - raise Exception( - 'This ensemble can only be used in classification problems! ' + - f'Got target dtype {dtype_dict[target]} instead!') - - for _, mixer in enumerate(mixers): - score_dict = evaluate_accuracy( - data.data_frame, - mixer(data, args)['prediction'], - target, - accuracy_functions, - ts_analysis=ts_analysis - ) - avg_score = np.mean(list(score_dict.values())) - log.info(f'Mixer: {type(mixer).__name__} got accuracy: {avg_score}') - - if is_nan_numeric(avg_score): - avg_score = -pow(2, 63) - log.warning(f'Change the accuracy of mixer {type(mixer).__name__} to valid value: {avg_score}') - - self.mixer_scores[f'__mdb_mixer_{type(mixer).__name__}'] = avg_score - - def _pick_mode_highest_score(self, prediction: pd.Series): - """If the predictions are unimodal, return the mode. If there are multiple modes, return the mode whose voting - mixers have the highest score.""" - prediction_counts = prediction.value_counts() - - # If there is a clear winner, i.e. only one prediction - if len(prediction_counts) == 1: - return prediction_counts.index[0] - - counts = prediction_counts.values # how many times all predictions have appeared - max_count = np.max(counts) # how many times the most frequent predictions have apppeared - - # most frequent predictions and how many times they appeared - modes = prediction_counts[prediction_counts == max_count] - - modes_predictions = modes.index # most frequent predictions - - # For each mode, get the sum of the scores of the predictors who voted for it - modes_predictions_scores = {} - for mode_prediction in modes_predictions: - voting_mixers_name = prediction[prediction == mode_prediction].index.tolist() - modes_predictions_scores[mode_prediction] = np.sum( - [self.mixer_scores[mixer_name] for mixer_name in voting_mixers_name]) - - # Return the mode with the maximum sum of accuracies - return max(modes_predictions_scores, key=modes_predictions_scores.get) - - def __call__(self, ds: EncodedDs, args: PredictionArguments) -> pd.DataFrame: - predictions_df = pd.DataFrame() - for mixer in self.mixers: - predictions_df[f'__mdb_mixer_{type(mixer).__name__}'] = mixer(ds, args=args)['prediction'] - - mode_df = predictions_df.apply(func=self._pick_mode_highest_score, axis='columns') - - return pd.DataFrame(mode_df, columns=['prediction'])
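A toy walk-through of the tie-breaking rule in `_pick_mode_highest_score` (mixer names and accuracies invented): when two predictions tie for the mode, the one whose voting mixers have the larger summed accuracy wins.

```python
import pandas as pd

row = pd.Series({'__mdb_mixer_Neural': 'a', '__mdb_mixer_LightGBM': 'b',
                 '__mdb_mixer_Regression': 'b', '__mdb_mixer_Unit': 'a'})
mixer_scores = {'__mdb_mixer_Neural': 0.9, '__mdb_mixer_LightGBM': 0.6,
                '__mdb_mixer_Regression': 0.5, '__mdb_mixer_Unit': 0.4}

# 'a' and 'b' each appear twice; 'a' voters sum to 0.9 + 0.4 = 1.3 while
# 'b' voters sum to 0.6 + 0.5 = 1.1, so the ensemble would return 'a'.
```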
\ No newline at end of file
diff --git a/docs/_modules/lightwood/ensemble/weighted_mean_ensemble.html b/docs/_modules/lightwood/ensemble/weighted_mean_ensemble.html
deleted file mode 100644
index 831e4fc37..000000000
--- a/docs/_modules/lightwood/ensemble/weighted_mean_ensemble.html
+++ /dev/null
@@ -1,273 +0,0 @@
-lightwood.ensemble.weighted_mean_ensemble — lightwood 1.6.1 documentation

Source code for lightwood.ensemble.weighted_mean_ensemble

-from typing import List, Optional
-
-import numpy as np
-import pandas as pd
-
-from lightwood.helpers.log import log
-from lightwood.helpers.numeric import is_nan_numeric
-from lightwood.mixer.base import BaseMixer
-from lightwood.ensemble.base import BaseEnsemble
-from lightwood.api.types import PredictionArguments
-from lightwood.data.encoded_ds import EncodedDs
-from lightwood.helpers.general import evaluate_accuracy
-from lightwood import dtype
-
-
-
[docs]class WeightedMeanEnsemble(BaseEnsemble): - def __init__(self, target, mixers: List[BaseMixer], data: EncodedDs, args: PredictionArguments, - dtype_dict: dict, accuracy_functions, ts_analysis: Optional[dict] = None) -> None: - super().__init__(target, mixers, data) - if dtype_dict[target] not in (dtype.float, dtype.integer, dtype.quantity): - raise Exception( - f'This ensemble can only be used regression problems! Got target dtype {dtype_dict[target]} instead!') - - score_list = [] - for _, mixer in enumerate(mixers): - score_dict = evaluate_accuracy( - data.data_frame, - mixer(data, args)['prediction'], - target, - accuracy_functions, - ts_analysis=ts_analysis - ) - avg_score = np.mean(list(score_dict.values())) - log.info(f'Mixer: {type(mixer).__name__} got accuracy: {avg_score}') - - if is_nan_numeric(avg_score): - log.warning(f'Could not compute a valid accuracy for mixer: {type(mixer).__name__}, \ - functions: {accuracy_functions}, yielded invalid average score {avg_score}, \ - resetting that to -pow(2,63) instead.') - avg_score = -pow(2, 63) - - score_list.append(avg_score) - - self.weights = self.accuracies_to_weights(np.array(score_list)) - - def __call__(self, ds: EncodedDs, args: PredictionArguments) -> pd.DataFrame: - df = pd.DataFrame() - for mixer in self.mixers: - df[f'__mdb_mixer_{type(mixer).__name__}'] = mixer(ds, args=args)['prediction'] - - avg_predictions_df = df.apply(lambda x: np.average(x, weights=self.weights), axis='columns') - return pd.DataFrame(avg_predictions_df, columns=['prediction']) - - def accuracies_to_weights(self, x: np.array) -> np.array: - # Converts accuracies to weights using the softmax function. - e_x = np.exp(x - np.max(x)) - return e_x / e_x.sum()
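`accuracies_to_weights` is a plain softmax over the mixers' validation accuracies; a quick numeric check (accuracy values invented):

```python
import numpy as np

accs = np.array([0.80, 0.70, 0.75])
e = np.exp(accs - accs.max())
weights = e / e.sum()
print(weights)  # ~[0.35, 0.32, 0.33]; more accurate mixers get slightly larger weights
```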
\ No newline at end of file
diff --git a/docs/_modules/lightwood/mixer/base.html b/docs/_modules/lightwood/mixer/base.html
deleted file mode 100644
index 8ed89c2f1..000000000
--- a/docs/_modules/lightwood/mixer/base.html
+++ /dev/null
@@ -1,283 +0,0 @@
-lightwood.mixer.base — lightwood 1.6.1 documentation

Source code for lightwood.mixer.base

-import pandas as pd
-
-from lightwood.data.encoded_ds import EncodedDs
-from lightwood.api.types import PredictionArguments
-
-
-
[docs]class BaseMixer: - """ - Base class for all mixers. - - Mixers are the backbone of all Lightwood machine learning models. They intake encoded feature representations for every column, and are tasked with learning to fulfill the predictive requirements stated in a problem definition. - - There are two important methods for any mixer to work: - 1. `fit()` contains all logic to train the mixer with the training data that has been encoded by all the (already trained) Lightwood encoders for any given task. - 2. `__call__()` is executed to generate predictions once the mixer has been trained using `fit()`. - - An additional `partial_fit()` method is used to update any mixer that has already been trained. - - Class Attributes: - - stable: If set to `True`, this mixer should always work. Any mixer with `stable=False` can be expected to fail under some circumstances. - - fit_data_len: Length of the training data. - - supports_proba: For classification tasks, whether the mixer supports yielding per-class scores rather than only returning the predicted label. - - """ # noqa - stable: bool - fit_data_len: int # @TODO (Patricio): should this really be in `BaseMixer`? - supports_proba: bool - - def __init__(self, stop_after: int): - """ - Initializer a mixer. - - :param stop_after: Time budget to train this mixer. - """ - self.stop_after = stop_after - self.supports_proba = False - -
[docs] def fit(self, train_data: EncodedDs, dev_data: EncodedDs) -> None: - """ - Fits/trains a mixer with training data. - - :param train_data: encoded representations of the training data subset. - :param dev_data: encoded representations of the "dev" data subset. This can be used as an internal validation subset (e.g. it is used for early stopping in the default `Neural` mixer). - - """ # noqa - raise NotImplementedError()
- - def __call__(self, ds: EncodedDs, - args: PredictionArguments = PredictionArguments()) -> pd.DataFrame: - """ - Calls a trained mixer to predict the target column given some input data. - - :param ds: encoded representations of input data. - :param args: a `lightwood.api.types.PredictionArguments` object, including all relevant inference-time arguments to customize the behavior. - :return: - """ # noqa - raise NotImplementedError() - -
[docs] def partial_fit(self, train_data: EncodedDs, dev_data: EncodedDs) -> None: - """ - Partially fits/trains a mixer with new training data. This is a somewhat experimental method, and it aims at updating pre-existing Lightwood predictors. - - :param train_data: encoded representations of the new training data subset. - :param dev_data: encoded representations of new the "dev" data subset. As in `fit()`, this can be used as an internal validation subset. - - """ # noqa - pass
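A hedged sketch of the `fit()` / `__call__()` contract described above (the mixer below is invented and deliberately trivial; it just predicts the training mean of the target):

```python
import pandas as pd

from lightwood.api.types import PredictionArguments
from lightwood.data.encoded_ds import EncodedDs
from lightwood.mixer.base import BaseMixer


class MeanMixer(BaseMixer):
    stable = True

    def __init__(self, stop_after: int, target: str):
        super().__init__(stop_after)
        self.target = target
        self._mean = None

    def fit(self, train_data: EncodedDs, dev_data: EncodedDs) -> None:
        # "Training" is just remembering the mean of the target column.
        self._mean = train_data.data_frame[self.target].mean()

    def __call__(self, ds: EncodedDs,
                 args: PredictionArguments = PredictionArguments()) -> pd.DataFrame:
        return pd.DataFrame({'prediction': [self._mean] * len(ds.data_frame)})
```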
\ No newline at end of file
diff --git a/docs/_modules/lightwood/mixer/lightgbm.html b/docs/_modules/lightwood/mixer/lightgbm.html
deleted file mode 100644
index 776e044ac..000000000
--- a/docs/_modules/lightwood/mixer/lightgbm.html
+++ /dev/null
@@ -1,461 +0,0 @@
-lightwood.mixer.lightgbm — lightwood 1.6.1 documentation

Source code for lightwood.mixer.lightgbm

-import time
-from typing import Dict, List, Set
-
-import torch
-import optuna
-import lightgbm
-import numpy as np
-import pandas as pd
-from sklearn.preprocessing import OrdinalEncoder
-import optuna.integration.lightgbm as optuna_lightgbm
-
-from lightwood.api import dtype
-from lightwood.helpers.log import log
-from lightwood.mixer.base import BaseMixer
-from lightwood.helpers.device import get_devices
-from lightwood.api.types import PredictionArguments
-from lightwood.data.encoded_ds import EncodedDs
-
-
-optuna.logging.set_verbosity(optuna.logging.CRITICAL)
-
-
-def check_gpu_support():
-    try:
-        data = np.random.rand(50, 2)
-        label = np.random.randint(2, size=50)
-        train_data = lightgbm.Dataset(data, label=label)
-        params = {'num_iterations': 1, 'device': 'gpu'}
-        lightgbm.train(params, train_set=train_data)
-        device, nr_devices = get_devices()
-        if nr_devices > 0 and str(device) != 'cpu':
-            return True
-        else:
-            return False
-    except Exception:
-        return False
-
-
-
[docs]class LightGBM(BaseMixer): - model: lightgbm.LGBMModel - ordinal_encoder: OrdinalEncoder - label_set: Set[str] - max_bin: int - device: torch.device - device_str: str - num_iterations: int - use_optuna: bool - supports_proba: bool - - def __init__( - self, stop_after: int, target: str, dtype_dict: Dict[str, str], - input_cols: List[str], - fit_on_dev: bool, use_optuna: bool = True): - super().__init__(stop_after) - self.model = None - self.ordinal_encoder = None - self.positive_domain = False - self.label_set = set() - self.target = target - self.dtype_dict = dtype_dict - self.input_cols = input_cols - self.use_optuna = use_optuna - self.params = {} - self.fit_on_dev = fit_on_dev - self.supports_proba = dtype_dict[target] in [dtype.binary, dtype.categorical] - self.stable = True - - # GPU Only available via --install-option=--gpu with opencl-dev and libboost dev (a bunch of them) installed, so let's turn this off for now and we can put it behind some flag later # noqa - gpu_works = check_gpu_support() - if not gpu_works: - self.device = torch.device('cpu') - self.device_str = 'cpu' - log.warning('LightGBM running on CPU, this somewhat slower than the GPU version, consider using a GPU instead') # noqa - else: - self.device = torch.device('cuda') - self.device_str = 'gpu' - - self.max_bin = 255 - - def _to_dataset(self, data, output_dtype): - for subset_name in data.keys(): - for input_col in self.input_cols: - if data[subset_name]['data'] is None: - data[subset_name]['data'] = data[subset_name]['ds'].get_encoded_column_data( - input_col).to(self.device) - else: - enc_col = data[subset_name]['ds'].get_encoded_column_data(input_col) - data[subset_name]['data'] = torch.cat((data[subset_name]['data'], enc_col.to(self.device)), 1) - - data[subset_name]['data'] = data[subset_name]['data'].numpy() - - label_data = data[subset_name]['ds'].get_column_original_data(self.target) - - if output_dtype in (dtype.categorical, dtype.binary): - if subset_name == 'train': - self.ordinal_encoder = OrdinalEncoder() - self.label_set = set(label_data) - self.label_set.add('__mdb_unknown_cat') - self.ordinal_encoder.fit(np.array(list(self.label_set)).reshape(-1, 1)) - - label_data = [x if x in self.label_set else '__mdb_unknown_cat' for x in label_data] - label_data = self.ordinal_encoder.transform(np.array(label_data).reshape(-1, 1)).flatten() - elif output_dtype == dtype.integer: - label_data = label_data.clip(-pow(2, 63), pow(2, 63)).astype(int) - elif output_dtype in (dtype.float, dtype.quantity): - label_data = label_data.astype(float) - - data[subset_name]['label_data'] = label_data - - return data - -
[docs] def fit(self, train_data: EncodedDs, dev_data: EncodedDs) -> None: - log.info('Started fitting LGBM model') - data = { - 'train': {'ds': train_data, 'data': None, 'label_data': {}}, - 'dev': {'ds': dev_data, 'data': None, 'label_data': {}} - } - self.fit_data_len = len(data['train']['ds']) - self.positive_domain = getattr(train_data.encoders.get(self.target, None), 'positive_domain', False) - - output_dtype = self.dtype_dict[self.target] - - data = self._to_dataset(data, output_dtype) - - if output_dtype not in (dtype.categorical, dtype.integer, dtype.float, dtype.binary, dtype.quantity): - log.error(f'Lightgbm mixer not supported for type: {output_dtype}') - raise Exception(f'Lightgbm mixer not supported for type: {output_dtype}') - else: - objective = 'regression' if output_dtype in (dtype.integer, dtype.float, dtype.quantity) else 'multiclass' - metric = 'l2' if output_dtype in (dtype.integer, dtype.float, dtype.quantity) else 'multi_logloss' - - self.params = { - 'objective': objective, - 'metric': metric, - 'verbose': -1, - 'lambda_l1': 0.1, - 'lambda_l2': 0.1, - 'force_row_wise': True, - 'device_type': self.device_str, - } - - if objective == 'multiclass': - self.all_classes = self.ordinal_encoder.categories_[0] - self.params['num_class'] = self.all_classes.size - if self.device_str == 'gpu': - self.params['gpu_use_dp'] = True - - # Determine time per iterations - start = time.time() - self.params['num_iterations'] = 1 - self.model = lightgbm.train(self.params, lightgbm.Dataset( - data['train']['data'], - label=data['train']['label_data']), - verbose_eval=False) - end = time.time() - seconds_for_one_iteration = max(0.1, end - start) - - # Determine nr of iterations - log.info(f'A single GBM iteration takes {seconds_for_one_iteration} seconds') - self.num_iterations = int(self.stop_after * 0.8 / seconds_for_one_iteration) - - # Turn on grid search if training doesn't take too long using it - kwargs = {} - if self.use_optuna and self.num_iterations >= 200: - model_generator = optuna_lightgbm - kwargs['time_budget'] = self.stop_after * 0.4 - self.num_iterations = int(self.num_iterations / 2) - kwargs['optuna_seed'] = 0 - else: - model_generator = lightgbm - - # Prepare the data - train_dataset = lightgbm.Dataset(data['train']['data'], label=data['train']['label_data']) - dev_dataset = lightgbm.Dataset(data['dev']['data'], label=data['dev']['label_data']) - - # Train the models - log.info( - f'Training GBM ({model_generator}) with {self.num_iterations} iterations given {self.stop_after} seconds constraint') # noqa - if self.num_iterations < 1: - self.num_iterations = 1 - self.params['num_iterations'] = int(self.num_iterations) - - self.params['early_stopping_rounds'] = 5 - - self.model = model_generator.train( - self.params, train_dataset, valid_sets=[dev_dataset, train_dataset], - valid_names=['dev', 'train'], - verbose_eval=False, **kwargs) - self.num_iterations = self.model.best_iteration - log.info(f'Lightgbm model contains {self.model.num_trees()} weak estimators') - - if self.fit_on_dev: - self.partial_fit(dev_data, train_data)
- -
[docs] def partial_fit(self, train_data: EncodedDs, dev_data: EncodedDs) -> None: - pct_of_original = len(train_data) / self.fit_data_len - iterations = max(1, int(self.num_iterations * pct_of_original) / 2) - - data = {'retrain': {'ds': train_data, 'data': None, 'label_data': {}}, 'dev': { - 'ds': dev_data, 'data': None, 'label_data': {}}} - - output_dtype = self.dtype_dict[self.target] - data = self._to_dataset(data, output_dtype) - - train_dataset = lightgbm.Dataset(data['retrain']['data'], label=data['retrain']['label_data']) - dev_dataset = lightgbm.Dataset(data['dev']['data'], label=data['dev']['label_data']) - - log.info(f'Updating lightgbm model with {iterations} iterations') - if iterations < 1: - iterations = 1 - self.params['num_iterations'] = int(iterations) - self.model = lightgbm.train( - self.params, train_dataset, valid_sets=[dev_dataset, train_dataset], - valid_names=['dev', 'retrain'], - verbose_eval=False, init_model=self.model) - log.info(f'Model now has a total of {self.model.num_trees()} weak estimators')
- - def __call__(self, ds: EncodedDs, - args: PredictionArguments = PredictionArguments()) -> pd.DataFrame: - data = None - for input_col in self.input_cols: - if data is None: - data = ds.get_encoded_column_data(input_col).to(self.device) - else: - data = torch.cat((data, ds.get_encoded_column_data(input_col).to(self.device)), 1) - - data = data.numpy() - raw_predictions = self.model.predict(data) - - if self.ordinal_encoder is not None: - decoded_predictions = self.ordinal_encoder.inverse_transform( - np.argmax(raw_predictions, axis=1).reshape(-1, 1)).flatten() - else: - decoded_predictions = raw_predictions - - if self.positive_domain: - decoded_predictions = [max(0, p) for p in decoded_predictions] - - ydf = pd.DataFrame({'prediction': decoded_predictions}) - - if args.predict_proba and self.ordinal_encoder is not None: - for idx, label in enumerate(self.ordinal_encoder.categories_[0].tolist()): - ydf[f'__mdb_proba_{label}'] = raw_predictions[:, idx] - - return ydf
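The `fit` method above sizes `num_iterations` by first timing a single boosting round against the `stop_after` budget. A rough standalone sketch of that idea follows (synthetic data and a made-up 10-second budget; this is an illustration, not the lightwood API itself):

```
import time

import numpy as np
import lightgbm

X = np.random.rand(1000, 10)
y = X.sum(axis=1) + np.random.rand(1000) * 0.1

params = {'objective': 'regression', 'metric': 'l2', 'verbose': -1, 'num_iterations': 1}

# time one boosting round
start = time.time()
lightgbm.train(params, lightgbm.Dataset(X, label=y))
seconds_per_iteration = max(0.1, time.time() - start)

stop_after = 10  # assumed overall time budget, in seconds
params['num_iterations'] = max(1, int(stop_after * 0.8 / seconds_per_iteration))
model = lightgbm.train(params, lightgbm.Dataset(X, label=y))
print(f'{model.num_trees()} weak estimators trained')
```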
\ No newline at end of file
diff --git a/docs/_modules/lightwood/mixer/lightgbm_array.html b/docs/_modules/lightwood/mixer/lightgbm_array.html
deleted file mode 100644
index fb715581b..000000000
--- a/docs/_modules/lightwood/mixer/lightgbm_array.html
+++ /dev/null
@@ -1,285 +0,0 @@
Source code for lightwood.mixer.lightgbm_array

-import numpy as np
-import pandas as pd
-from typing import Dict, List, Union
-
-from lightwood.api import dtype
-from lightwood.helpers.log import log
-from lightwood.mixer.base import BaseMixer
-from lightwood.mixer.lightgbm import LightGBM
-from lightwood.api.types import PredictionArguments
-from lightwood.data.encoded_ds import EncodedDs, ConcatedEncodedDs
-
-
-
-class LightGBMArray(BaseMixer):
-    """LightGBM-based model, intended for usage in time series tasks."""
-    models: List[LightGBM]
-    n_ts_predictions: int
-    submodel_stop_after: float
-    target: str
-    supports_proba: bool
-
-    def __init__(
-            self, stop_after: int, target: str, dtype_dict: Dict[str, str],
-            input_cols: List[str],
-            n_ts_predictions: int, fit_on_dev: bool):
-        super().__init__(stop_after)
-        self.submodel_stop_after = stop_after / n_ts_predictions
-        self.target = target
-        dtype_dict[target] = dtype.float
-        self.models = [LightGBM(self.submodel_stop_after, target, dtype_dict, input_cols, fit_on_dev, use_optuna=False)
-                       for _ in range(n_ts_predictions)]
-        self.n_ts_predictions = n_ts_predictions  # for time series tasks, how long is the forecast horizon
-        self.supports_proba = False
-        self.stable = True
-
-    def fit(self, train_data: EncodedDs, dev_data: EncodedDs) -> None:
-        log.info('Started fitting LGBM models for array prediction')
-
-        for timestep in range(self.n_ts_predictions):
-            if timestep > 0:
-                train_data.data_frame[self.target] = train_data.data_frame[f'{self.target}_timestep_{timestep}']
-                dev_data.data_frame[self.target] = dev_data.data_frame[f'{self.target}_timestep_{timestep}']
-
-            self.models[timestep].fit(train_data, dev_data)  # @TODO: this call could be parallelized
-
-    def partial_fit(self, train_data: EncodedDs, dev_data: EncodedDs) -> None:
-        log.info('Updating array of LGBM models...')
-
-        for timestep in range(self.n_ts_predictions):
-            if timestep > 0:
-                train_data.data_frame[self.target] = train_data.data_frame[f'{self.target}_timestep_{timestep}']
-                dev_data.data_frame[self.target] = dev_data.data_frame[f'{self.target}_timestep_{timestep}']
-
-            self.models[timestep].partial_fit(train_data, dev_data)  # @TODO: this call could be parallelized
-
-    def __call__(self, ds: Union[EncodedDs, ConcatedEncodedDs],
-                 args: PredictionArguments = PredictionArguments()) -> pd.DataFrame:
-        if args.predict_proba:
-            log.warning('This model does not output probability estimates')
-
-        length = sum(ds.encoded_ds_lenghts) if isinstance(ds, ConcatedEncodedDs) else len(ds)
-        ydf = pd.DataFrame(0,  # zero-filled
-                           index=np.arange(length),
-                           columns=[f'prediction_{i}' for i in range(self.n_ts_predictions)])
-
-        for timestep in range(self.n_ts_predictions):
-            ydf[f'prediction_{timestep}'] = self.models[timestep](ds, args)
-
-        ydf['prediction'] = ydf.values.tolist()
-        return ydf[['prediction']]
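`LightGBMArray` implements direct multi-step forecasting: one submodel per future timestep, each fitted against the target shifted that many rows ahead. A minimal standalone sketch of the same strategy, with scikit-learn regressors standing in for the LightGBM submodels and purely synthetic data:

```
import numpy as np
import pandas as pd
from sklearn.linear_model import LinearRegression

horizon = 3                                       # forecast this many steps ahead
series = pd.Series(np.sin(np.arange(200) / 10.0))

# simple lag features standing in for lightwood's encoded inputs
features = pd.DataFrame({f'lag_{i}': series.shift(i) for i in range(1, 6)})

models = []
for step in range(horizon):
    target = series.shift(-step)                  # target moved `step` rows into the future
    mask = features.notna().all(axis=1) & target.notna()
    models.append(LinearRegression().fit(features[mask], target[mask]))

last_known = features.iloc[[-1]]
forecast = [float(m.predict(last_known)[0]) for m in models]
print(forecast)                                   # one value per future timestep
```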
\ No newline at end of file
diff --git a/docs/_modules/lightwood/mixer/neural.html b/docs/_modules/lightwood/mixer/neural.html
deleted file mode 100644
index bfb6ff25e..000000000
--- a/docs/_modules/lightwood/mixer/neural.html
+++ /dev/null
@@ -1,563 +0,0 @@

Source code for lightwood.mixer.neural

-import time
-from copy import deepcopy
-from typing import Dict, List
-
-import torch
-import numpy as np
-import pandas as pd
-from torch import nn
-import torch_optimizer as ad_optim
-from sklearn.metrics import r2_score
-from torch.cuda.amp import GradScaler
-from torch.utils.data import DataLoader
-from torch.nn.modules.loss import MSELoss
-from torch.optim.optimizer import Optimizer
-
-from lightwood.api import dtype
-from lightwood.helpers.log import log
-from lightwood.encoder.base import BaseEncoder
-from lightwood.helpers.torch import LightwoodAutocast
-from lightwood.data.encoded_ds import EncodedDs
-from lightwood.mixer.base import BaseMixer
-from lightwood.mixer.helpers.ar_net import ArNet
-from lightwood.mixer.helpers.default_net import DefaultNet
-from lightwood.api.types import TimeseriesSettings, PredictionArguments
-from lightwood.mixer.helpers.transform_corss_entropy_loss import TransformCrossEntropyLoss
-
-
-
[docs]class Neural(BaseMixer): - model: nn.Module - dtype_dict: dict - target: str - epochs_to_best: int - fit_on_dev: bool - supports_proba: bool - - def __init__( - self, stop_after: int, target: str, dtype_dict: Dict[str, str], - timeseries_settings: TimeseriesSettings, target_encoder: BaseEncoder, net: str, fit_on_dev: bool, - search_hyperparameters: bool): - """ - The Neural mixer trains a fully connected dense network from concatenated encoded outputs of each of the features in the dataset to predicted the encoded output. - - :param stop_after: How long the total fitting process should take - :param target: Name of the target column - :param dtype_dict: Data type dictionary - :param timeseries_settings: TimeseriesSettings object for time-series tasks, refer to its documentation for available settings. - :param target_encoder: Reference to the encoder used for the target - :param net: The network type to use (`DeafultNet` or `ArNet`) - :param fit_on_dev: If we should fit on the dev dataset - :param search_hyperparameters: If the network should run a more through hyperparameter search (currently disabled) - """ # noqa - super().__init__(stop_after) - self.dtype_dict = dtype_dict - self.target = target - self.timeseries_settings = timeseries_settings - self.target_encoder = target_encoder - self.epochs_to_best = 0 - self.fit_on_dev = fit_on_dev - self.net_class = DefaultNet if net == 'DefaultNet' else ArNet - self.supports_proba = dtype_dict[target] in [dtype.binary, dtype.categorical] - self.search_hyperparameters = search_hyperparameters - self.stable = True - - def _final_tuning(self, data): - if self.dtype_dict[self.target] in (dtype.integer, dtype.float, dtype.quantity): - self.model = self.model.eval() - with torch.no_grad(): - acc_dict = {} - for decode_log in [True, False]: - self.target_encoder.decode_log = decode_log - decoded_predictions = [] - decoded_real_values = [] - for X, Y in data: - X = X.to(self.model.device) - Y = Y.to(self.model.device) - Yh = self.model(X) - - Yh = torch.unsqueeze(Yh, 0) if len(Yh.shape) < 2 else Yh - Y = torch.unsqueeze(Y, 0) if len(Y.shape) < 2 else Y - - decoded_predictions.extend(self.target_encoder.decode(Yh)) - decoded_real_values.extend(self.target_encoder.decode(Y)) - - acc_dict[decode_log] = r2_score(decoded_real_values, decoded_predictions) - - self.target_encoder.decode_log = acc_dict[True] > acc_dict[False] - - def _select_criterion(self) -> torch.nn.Module: - if self.dtype_dict[self.target] in (dtype.categorical, dtype.binary): - criterion = TransformCrossEntropyLoss(weight=self.target_encoder.index_weights.to(self.model.device)) - elif self.dtype_dict[self.target] in (dtype.tags): - criterion = nn.BCEWithLogitsLoss() - elif (self.dtype_dict[self.target] in (dtype.integer, dtype.float, dtype.tsarray, dtype.quantity) - and self.timeseries_settings.is_timeseries): - criterion = nn.L1Loss() - elif self.dtype_dict[self.target] in (dtype.integer, dtype.float, dtype.quantity): - criterion = MSELoss() - else: - criterion = MSELoss() - - return criterion - - def _select_optimizer(self) -> Optimizer: - # ad_optim.Ranger - # torch.optim.AdamW - if self.timeseries_settings.is_timeseries: - optimizer = ad_optim.Ranger(self.model.parameters(), lr=self.lr) - else: - optimizer = ad_optim.Ranger(self.model.parameters(), lr=self.lr, weight_decay=2e-2) - - return optimizer - - def _find_lr(self, dl): - optimizer = self._select_optimizer() - criterion = self._select_criterion() - scaler = GradScaler() - - running_losses: List[float] = [] - cum_loss = 0 
- lr_log = [] - best_model = self.model - stop = False - batches = 0 - for epoch in range(1, 101): - if stop: - break - - for i, (X, Y) in enumerate(dl): - if stop: - break - - batches += len(X) - X = X.to(self.model.device) - Y = Y.to(self.model.device) - with LightwoodAutocast(): - optimizer.zero_grad() - Yh = self.model(X) - loss = criterion(Yh, Y) - if LightwoodAutocast.active: - scaler.scale(loss).backward() - scaler.step(optimizer) - scaler.update() - else: - loss.backward() - optimizer.step() - cum_loss += loss.item() - - # Account for ranger lookahead update - if (i + 1) * epoch % 6: - batches = 0 - lr = optimizer.param_groups[0]['lr'] - log.info(f'Loss of {cum_loss} with learning rate {lr}') - running_losses.append(cum_loss) - lr_log.append(lr) - cum_loss = 0 - if len(running_losses) < 2 or np.mean(running_losses[:-1]) > np.mean(running_losses): - optimizer.param_groups[0]['lr'] = lr * 1.4 - # Time saving since we don't have to start training fresh - best_model = deepcopy(self.model) - else: - stop = True - - best_loss_lr = lr_log[np.argmin(running_losses)] - lr = best_loss_lr - log.info(f'Found learning rate of: {lr}') - return lr, best_model - - def _max_fit(self, train_dl, dev_dl, criterion, optimizer, scaler, stop_after, return_model_after): - started = time.time() - epochs_to_best = 0 - best_dev_error = pow(2, 32) - running_errors = [] - best_model = self.model - - for epoch in range(1, return_model_after + 1): - self.model = self.model.train() - running_losses: List[float] = [] - for i, (X, Y) in enumerate(train_dl): - X = X.to(self.model.device) - Y = Y.to(self.model.device) - with LightwoodAutocast(): - optimizer.zero_grad() - Yh = self.model(X) - loss = criterion(Yh, Y) - if LightwoodAutocast.active: - scaler.scale(loss).backward() - scaler.step(optimizer) - scaler.update() - else: - loss.backward() - optimizer.step() - - running_losses.append(loss.item()) - - train_error = np.mean(running_losses) - epoch_error = self._error(dev_dl, criterion) - running_errors.append(epoch_error) - log.info(f'Loss @ epoch {epoch}: {epoch_error}') - - if np.isnan(train_error) or np.isnan( - running_errors[-1]) or np.isinf(train_error) or np.isinf( - running_errors[-1]): - break - - if best_dev_error > running_errors[-1]: - best_dev_error = running_errors[-1] - best_model = deepcopy(self.model) - epochs_to_best = epoch - - if len(running_errors) >= 5: - delta_mean = np.average([running_errors[-i - 1] - running_errors[-i] for i in range(1, 5)], - weights=[(1 / 2)**i for i in range(1, 5)]) - if delta_mean <= 0: - break - elif (time.time() - started) > stop_after: - break - elif running_errors[-1] < 0.0001 or train_error < 0.0001: - break - - if np.isnan(best_dev_error): - best_dev_error = pow(2, 32) - return best_model, epochs_to_best, best_dev_error - - def _error(self, dev_dl, criterion) -> float: - self.model = self.model.eval() - running_losses: List[float] = [] - with torch.no_grad(): - for X, Y in dev_dl: - X = X.to(self.model.device) - Y = Y.to(self.model.device) - Yh = self.model(X) - running_losses.append(criterion(Yh, Y).item()) - return np.mean(running_losses) - - def _init_net(self, ds: EncodedDs): - net_kwargs = {'input_size': len(ds[0][0]), - 'output_size': len(ds[0][1]), - 'num_hidden': self.num_hidden, - 'dropout': 0} - - if self.net_class == ArNet: - net_kwargs['encoder_span'] = ds.encoder_spans - net_kwargs['target_name'] = self.target - - self.model = self.net_class(**net_kwargs) - - # @TODO: Compare partial fitting fully on and fully off on the benchmarks! 
- # @TODO: Writeup on the methodology for partial fitting -
[docs] def fit(self, train_data: EncodedDs, dev_data: EncodedDs) -> None: - """ - Fits the Neural mixer on some data, making it ready to predit - - :param train_data: The EncodedDs on which to train the network - :param dev_data: Data used for early stopping and hyperparameter determination - """ - # ConcatedEncodedDs - self.batch_size = min(200, int(len(train_data) / 10)) - self.batch_size = max(40, self.batch_size) - - dev_dl = DataLoader(dev_data, batch_size=self.batch_size, shuffle=False) - train_dl = DataLoader(train_data, batch_size=self.batch_size, shuffle=False) - - self.lr = 1e-4 - self.num_hidden = 1 - - # Find learning rate - # keep the weights - self._init_net(train_data) - self.lr, self.model = self._find_lr(train_dl) - - # Keep on training - optimizer = self._select_optimizer() - criterion = self._select_criterion() - scaler = GradScaler() - - self.model, epoch_to_best_model, err = self._max_fit( - train_dl, dev_dl, criterion, optimizer, scaler, self.stop_after, return_model_after=20000) - - self.epochs_to_best += epoch_to_best_model - - if self.fit_on_dev: - self.partial_fit(dev_data, train_data) - self._final_tuning(dev_data)
[docs] def partial_fit(self, train_data: EncodedDs, dev_data: EncodedDs) -> None: - """ - Augments the mixer's fit with new data, nr of epochs is based on the amount of epochs the original fitting took - - :param train_data: The EncodedDs on which to train the network - :param dev_data: Data used for early stopping and hyperparameter determination - """ - - # Based this on how long the initial training loop took, at a low learning rate as to not mock anything up tooo badly # noqa - train_dl = DataLoader(train_data, batch_size=self.batch_size, shuffle=True) - dev_dl = DataLoader(dev_data, batch_size=self.batch_size, shuffle=True) - optimizer = self._select_optimizer() - criterion = self._select_criterion() - scaler = GradScaler() - - self.model, _, _ = self._max_fit(train_dl, dev_dl, criterion, optimizer, scaler, - self.stop_after, max(1, int(self.epochs_to_best / 3)))
- - def __call__(self, ds: EncodedDs, - args: PredictionArguments = PredictionArguments()) -> pd.DataFrame: - """ - Make predictions based on datasource similar to the one used to fit (sans the target column) - - :param ds: The EncodedDs for which to generate the predictions - :param arg: Argument for predicting - - :returns: A dataframe cotaining the decoded predictions and (depending on the args) additional information such as the probabilites for each target class - """ # noqa - self.model = self.model.eval() - decoded_predictions: List[object] = [] - all_probs: List[List[float]] = [] - rev_map = {} - - with torch.no_grad(): - for idx, (X, Y) in enumerate(ds): - X = X.to(self.model.device) - Yh = self.model(X) - Yh = torch.unsqueeze(Yh, 0) if len(Yh.shape) < 2 else Yh - - kwargs = {} - for dep in self.target_encoder.dependencies: - kwargs['dependency_data'] = {dep: ds.data_frame.iloc[idx][[dep]].values} - - if args.predict_proba and self.supports_proba: - kwargs['return_raw'] = True - decoded_prediction, probs, rev_map = self.target_encoder.decode(Yh, **kwargs) - all_probs.append(probs) - else: - decoded_prediction = self.target_encoder.decode(Yh, **kwargs) - - if not self.timeseries_settings.is_timeseries or self.timeseries_settings.nr_predictions == 1: - decoded_predictions.extend(decoded_prediction) - else: - decoded_predictions.append(decoded_prediction) - - ydf = pd.DataFrame({'prediction': decoded_predictions}) - - if args.predict_proba and self.supports_proba: - raw_predictions = np.array(all_probs).squeeze() - for idx, label in enumerate(rev_map.values()): - ydf[f'__mdb_proba_{label}'] = raw_predictions[:, idx] - - return ydf
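The core of `_max_fit` above is a plain train/dev loop that keeps a deep copy of the best model by dev loss and stops when the budget runs out or dev loss stops improving. A compressed sketch of that pattern follows; the toy data, layer sizes and 10-second budget are illustrative assumptions, not lightwood defaults:

```
import time
from copy import deepcopy

import torch
from torch import nn

X = torch.rand(500, 8)
Y = X.sum(dim=1, keepdim=True)
(train_X, train_Y), (dev_X, dev_Y) = (X[:400], Y[:400]), (X[400:], Y[400:])

model = nn.Sequential(nn.Linear(8, 16), nn.ReLU(), nn.Linear(16, 1))
optimizer = torch.optim.AdamW(model.parameters(), lr=1e-3)
criterion = nn.MSELoss()

best_model, best_dev_error, started = deepcopy(model), float('inf'), time.time()
for epoch in range(1000):
    optimizer.zero_grad()
    loss = criterion(model(train_X), train_Y)
    loss.backward()
    optimizer.step()

    with torch.no_grad():
        dev_error = criterion(model(dev_X), dev_Y).item()

    if dev_error < best_dev_error:                 # keep the best checkpoint seen so far
        best_dev_error, best_model = dev_error, deepcopy(model)
    if time.time() - started > 10:                 # stop_after-style time budget
        break

print(f'best dev loss: {best_dev_error:.4f}')
```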
\ No newline at end of file
diff --git a/docs/_modules/lightwood/mixer/regression.html b/docs/_modules/lightwood/mixer/regression.html
deleted file mode 100644
index 9871b21e3..000000000
--- a/docs/_modules/lightwood/mixer/regression.html
+++ /dev/null
@@ -1,279 +0,0 @@

Source code for lightwood.mixer.regression

-import torch
-import pandas as pd
-from scipy.special import softmax
-from sklearn.linear_model import LinearRegression
-
-from lightwood.helpers.log import log
-from lightwood.api.dtype import dtype
-from lightwood.mixer import BaseMixer
-from lightwood.encoder.base import BaseEncoder
-from lightwood.api.types import PredictionArguments
-from lightwood.data.encoded_ds import ConcatedEncodedDs, EncodedDs
-
-
-
-class Regression(BaseMixer):
-    model: LinearRegression
-    label_map: dict
-    supports_proba: bool
-
-    def __init__(self, stop_after: int, target_encoder: BaseEncoder, dtype_dict: dict, target: str):
-        super().__init__(stop_after)
-        self.target_encoder = target_encoder
-        self.target_dtype = dtype_dict[target]
-        self.supports_proba = self.target_dtype in [dtype.binary, dtype.categorical]
-        self.label_map = {}
-        self.stable = False
-
-    def fit(self, train_data: EncodedDs, dev_data: EncodedDs) -> None:
-        if self.target_dtype not in (dtype.float, dtype.integer, dtype.quantity):
-            raise Exception(f'Unspported {self.target_dtype} type for regression')
-        log.info('Fitting Linear Regression model')
-        X = []
-        Y = []
-        for x, y in ConcatedEncodedDs([train_data, dev_data]):
-            X.append(x.tolist())
-            Y.append(y.tolist())
-
-        if self.supports_proba:
-            self.label_map = self.target_encoder.rev_map
-
-        self.model = LinearRegression().fit(X, Y)
-        log.info(f'Regression based correlation of: {self.model.score(X, Y)}')
-
-    def partial_fit(self, train_data: EncodedDs, dev_data: EncodedDs) -> None:
-        self.fit(train_data, dev_data)
-
-    def __call__(self, ds: EncodedDs,
-                 args: PredictionArguments = PredictionArguments()) -> pd.DataFrame:
-        X = []
-        for x, _ in ds:
-            X.append(x.tolist())
-
-        Yh = self.model.predict(X)
-
-        decoded_predictions = self.target_encoder.decode(torch.Tensor(Yh))
-
-        ydf = pd.DataFrame({'prediction': decoded_predictions})
-
-        if args.predict_proba and self.label_map:
-            raw_predictions = softmax(Yh.squeeze(), axis=1)
-            for idx, label in enumerate(self.target_encoder.rev_map.values()):
-                ydf[f'__mdb_proba_{label}'] = raw_predictions[:, idx]
-
-        return ydf
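The Regression mixer is essentially a linear model over the concatenated encoder outputs; `model.score(X, Y)` is what gets logged above as the "regression based correlation". A standalone sketch on synthetic encoded vectors (an illustration, not the lightwood API):

```
import numpy as np
from sklearn.linear_model import LinearRegression

# stand-ins for the concatenated encoder outputs and the encoded target
encoded_X = np.random.rand(300, 6)
weights = np.array([1.0, 2.0, 0.0, 0.5, -1.0, 3.0]).reshape(-1, 1)
encoded_Y = encoded_X @ weights + np.random.rand(300, 1) * 0.01

model = LinearRegression().fit(encoded_X, encoded_Y)
print('score:', model.score(encoded_X, encoded_Y))   # the "regression based correlation"

preds = model.predict(encoded_X[:5])                  # these would then pass through the target decoder
```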
\ No newline at end of file
diff --git a/docs/_modules/lightwood/mixer/sktime.html b/docs/_modules/lightwood/mixer/sktime.html
deleted file mode 100644
index cfabba4ed..000000000
--- a/docs/_modules/lightwood/mixer/sktime.html
+++ /dev/null
@@ -1,342 +0,0 @@

Source code for lightwood.mixer.sktime

-import numpy as np
-import pandas as pd
-from typing import Dict, Union
-from sktime.forecasting.arima import AutoARIMA
-
-from lightwood.api import dtype
-from lightwood.helpers.log import log
-from lightwood.mixer.base import BaseMixer
-from lightwood.api.types import PredictionArguments
-from lightwood.helpers.general import get_group_matches
-from lightwood.data.encoded_ds import EncodedDs, ConcatedEncodedDs
-
-
-
[docs]class SkTime(BaseMixer): - forecaster: str - n_ts_predictions: int - target: str - supports_proba: bool - - def __init__( - self, stop_after: int, target: str, dtype_dict: Dict[str, str], - n_ts_predictions: int, ts_analysis: Dict): - super().__init__(stop_after) - self.target = target - dtype_dict[target] = dtype.float - self.model_class = AutoARIMA - self.models = {} - self.n_ts_predictions = n_ts_predictions - self.ts_analysis = ts_analysis - self.forecasting_horizon = np.arange(1, self.n_ts_predictions) - self.cutoff_index = {} # marks index at which training data stops and forecasting window starts - self.grouped_by = ['__default'] if not ts_analysis['tss'].group_by else ts_analysis['tss'].group_by - self.supports_proba = False - self.stable = True - self.prepared = False - -
[docs] def fit(self, train_data: EncodedDs, dev_data: EncodedDs) -> None: - log.info('Started fitting sktime forecaster for array prediction') - - all_subsets = ConcatedEncodedDs([train_data, dev_data]) - df = all_subsets.data_frame.sort_values(by=f'__mdb_original_{self.ts_analysis["tss"].order_by[0]}') - data = {'data': df[self.target], - 'group_info': {gcol: df[gcol].tolist() - for gcol in self.grouped_by} if self.ts_analysis['tss'].group_by else {}} - - for group in self.ts_analysis['group_combinations']: - # many warnings might be thrown inside of statsmodels during stepwise procedure - self.models[group] = self.model_class(suppress_warnings=True) - - if self.grouped_by == ['__default']: - series_idxs = data['data'].index - series_data = data['data'].values - else: - series_idxs, series_data = get_group_matches(data, group) - - if series_data.size > 0: - series = pd.Series(series_data.squeeze(), index=series_idxs) - series = series.sort_index(ascending=True) - series = series.reset_index(drop=True) - try: - self.models[group].fit(series) - except ValueError: - self.models[group] = self.model_class(deseasonalize=False) - self.models[group].fit(series) - - self.cutoff_index[group] = len(series) - - if self.grouped_by == ['__default']: - break
[docs] def partial_fit(self, train_data: EncodedDs, dev_data: EncodedDs) -> None: - """ - Note: sktime asks for "specification of the time points for which forecasts are requested", - and this mixer complies by assuming forecasts will start immediately after the last observed - value. - - Because of this, `partial_fit` ensures that both `dev` and `test` splits are used to fit the AutoARIMA model. - - Due to how lightwood implements the `update` procedure, expected inputs are (for a train-dev-test split): - - :param dev_data: original `test` split (used to validate and select model if ensemble is `BestOf`) - :param train_data: includes original `train` and `dev` split - """ # noqa - self.fit(dev_data, train_data) - self.prepared = True
- - def __call__(self, ds: Union[EncodedDs, ConcatedEncodedDs], - args: PredictionArguments = PredictionArguments()) -> pd.DataFrame: - if args.predict_proba: - log.warning('This mixer does not output probability estimates') - - length = sum(ds.encoded_ds_lenghts) if isinstance(ds, ConcatedEncodedDs) else len(ds) - ydf = pd.DataFrame(0, # zero-filled - index=np.arange(length), - columns=['prediction'], - dtype=object) - - data = {'data': ds.data_frame[self.target].reset_index(drop=True), - 'group_info': {gcol: ds.data_frame[gcol].tolist() - for gcol in self.grouped_by} if self.ts_analysis['tss'].group_by else {}} - - # all_idxs = list(range(length)) # @TODO: substract, and assign empty predictions to remainder - - for group in self.ts_analysis['group_combinations']: - - if self.grouped_by == ['__default']: - series_idxs = data['data'].index - series_data = data['data'].values - else: - series_idxs, series_data = get_group_matches(data, group) - - if series_data.size > 0: - forecaster = self.models[group] if self.models[group].is_fitted else self.models['__default'] - - series = pd.Series(series_data.squeeze(), index=series_idxs) - series = series.sort_index(ascending=True) - series = series.reset_index(drop=True) - - for idx, _ in enumerate(series.iteritems()): - ydf['prediction'].iloc[series_idxs[idx]] = forecaster.predict( - np.arange(idx, idx + self.n_ts_predictions)).tolist() - - if self.grouped_by == ['__default']: - break - - return ydf[['prediction']]
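The mixer above leans on sktime's AutoARIMA, fitted per group. A minimal sketch of the same `fit`/`predict` calls on a single ungrouped series (assumes `sktime` and its `pmdarima` dependency are installed; the series is synthetic):

```
import numpy as np
import pandas as pd
from sktime.forecasting.arima import AutoARIMA

series = pd.Series(np.sin(np.arange(100) / 5.0) + np.random.rand(100) * 0.1)

forecaster = AutoARIMA(suppress_warnings=True)
forecaster.fit(series)

n_ts_predictions = 5
forecast = forecaster.predict(np.arange(1, n_ts_predictions + 1))  # next 5 relative steps
print(forecast)
```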
\ No newline at end of file
diff --git a/docs/_modules/lightwood/mixer/unit.html b/docs/_modules/lightwood/mixer/unit.html
deleted file mode 100644
index 76071fc93..000000000
--- a/docs/_modules/lightwood/mixer/unit.html
+++ /dev/null
@@ -1,262 +0,0 @@

Source code for lightwood.mixer.unit

-"""
-2021.07.16
-
-For encoders that already fine-tune on the targets (namely text)
-the unity mixer just arg-maxes the output of the encoder.
-"""
-
-from typing import List
-
-import torch
-import pandas as pd
-
-from lightwood.helpers.log import log
-from lightwood.mixer.base import BaseMixer
-from lightwood.encoder.base import BaseEncoder
-from lightwood.data.encoded_ds import EncodedDs
-from lightwood.api.types import PredictionArguments
-
-
-
-class Unit(BaseMixer):
-    def __init__(self, stop_after: int, target_encoder: BaseEncoder):
-        super().__init__(stop_after)
-        self.target_encoder = target_encoder
-        self.supports_proba = False
-        self.stable = True
-
-    def fit(self, train_data: EncodedDs, dev_data: EncodedDs) -> None:
-        log.info("Unit Mixer just borrows from encoder")
-
-    def partial_fit(self, train_data: EncodedDs, dev_data: EncodedDs) -> None:
-        pass
-
-    def __call__(self, ds: EncodedDs,
-                 args: PredictionArguments = PredictionArguments()) -> pd.DataFrame:
-        if args.predict_proba:
-            # @TODO: depending on the target encoder, this might be enabled
-            log.warning('This model does not output probability estimates')
-
-        decoded_predictions: List[object] = []
-
-        for X, _ in ds:
-            decoded_prediction = self.target_encoder.decode(torch.unsqueeze(X, 0))
-            decoded_predictions.extend(decoded_prediction)
-
-        ydf = pd.DataFrame({"prediction": decoded_predictions})
-        return ydf
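The Unit mixer does no modelling of its own: it simply decodes the already target-aware encoded column, which for a categorical target amounts to an arg-max over the encoder output. A toy sketch of that idea with a hypothetical label set and random stand-in encodings:

```
import torch

labels = ['negative', 'neutral', 'positive']     # assumed label space for the target
encoded_rows = torch.rand(4, len(labels))        # stands in for the encoder's per-row output

# a trivial arg-max "decoder", playing the role of target_encoder.decode()
decoded = [labels[int(torch.argmax(row))] for row in encoded_rows]
print(decoded)
```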
- - - - - - - - - - - \ No newline at end of file diff --git a/docs/_sources/analysis.rst.txt b/docs/_sources/analysis.rst.txt deleted file mode 100644 index 2885844f5..000000000 --- a/docs/_sources/analysis.rst.txt +++ /dev/null @@ -1,7 +0,0 @@ -:mod:`Analysis` -========================== - -Analyse mixer ensembles to extract static insights and train predict-time models for dynamic insights. - -.. automodule:: analysis - :members: \ No newline at end of file diff --git a/docs/_sources/api.rst.txt b/docs/_sources/api.rst.txt deleted file mode 100644 index f31245f63..000000000 --- a/docs/_sources/api.rst.txt +++ /dev/null @@ -1,15 +0,0 @@ -:mod:`API` -========================== - -The API module is how Lightwood interfaces with the user. - -.. toctree:: - :maxdepth: 1 - :caption: Table of Contents: - - api/high_level - api/dtype - api/types - api/predictor - api/json_ai - api/encode \ No newline at end of file diff --git a/docs/_sources/api/dtype.rst.txt b/docs/_sources/api/dtype.rst.txt deleted file mode 100644 index 179d02f1f..000000000 --- a/docs/_sources/api/dtype.rst.txt +++ /dev/null @@ -1,8 +0,0 @@ -Data Types (dtypes) --------------------- -Lightwood supports several data types used in standard machine learning pipelines. The ``dtype`` class is used to label columns of information as the right input format. The type inference procedure affects what feature engineering methodology is used on a labeled column. - -Currently, the supported way to encourage new data types is to include a custom tag in this file and to import a custom cleaning approach. Users may inherit the basic functionality of the cleaner and include their own flag specific to their data type. For steps on how to do this, please see the tutorials. - -.. autoclass:: api.dtype.dtype - :members: \ No newline at end of file diff --git a/docs/_sources/api/encode.rst.txt b/docs/_sources/api/encode.rst.txt deleted file mode 100644 index 264086f8c..000000000 --- a/docs/_sources/api/encode.rst.txt +++ /dev/null @@ -1,5 +0,0 @@ -Encode your data --------------------- - -.. automodule:: api.encode - :members: \ No newline at end of file diff --git a/docs/_sources/api/high_level.rst.txt b/docs/_sources/api/high_level.rst.txt deleted file mode 100644 index 5cc4a6ec4..000000000 --- a/docs/_sources/api/high_level.rst.txt +++ /dev/null @@ -1,5 +0,0 @@ -JSON-AI Config --------------------- - -.. automodule:: api.high_level - :members: \ No newline at end of file diff --git a/docs/_sources/api/json_ai.rst.txt b/docs/_sources/api/json_ai.rst.txt deleted file mode 100644 index 3f013f31b..000000000 --- a/docs/_sources/api/json_ai.rst.txt +++ /dev/null @@ -1,5 +0,0 @@ -JSON-AI Config --------------------- - -.. automodule:: api.json_ai - :members: \ No newline at end of file diff --git a/docs/_sources/api/predictor.rst.txt b/docs/_sources/api/predictor.rst.txt deleted file mode 100644 index 4cd83ad34..000000000 --- a/docs/_sources/api/predictor.rst.txt +++ /dev/null @@ -1,6 +0,0 @@ -Predictor Interface --------------------- -The ``PredictorInterface`` creates the skeletal structure around basic functionality of Lightwood. - -.. 
automodule:: api.predictor - :members: \ No newline at end of file diff --git a/docs/_sources/api/types.rst.txt b/docs/_sources/api/types.rst.txt deleted file mode 100644 index 9806a4ad0..000000000 --- a/docs/_sources/api/types.rst.txt +++ /dev/null @@ -1,7 +0,0 @@ -Lightwood API Types --------------------- -Lightwood consists of several high level abstractions to enable the data science/machine learning (DS/ML) pipeline in a step-by-step procedure. - -.. automodule:: api.types - :members: - :member-order: bysource \ No newline at end of file diff --git a/docs/_sources/data.rst.txt b/docs/_sources/data.rst.txt deleted file mode 100644 index 3ea9748d2..000000000 --- a/docs/_sources/data.rst.txt +++ /dev/null @@ -1,7 +0,0 @@ -:mod:`Data` -========================== - -The focus of these modules is on storing, transforming, cleaning, splitting, merging, getting and removing data. - -.. automodule:: data - :members: \ No newline at end of file diff --git a/docs/_sources/data/cleaner.rst.txt b/docs/_sources/data/cleaner.rst.txt deleted file mode 100644 index 09efa5de3..000000000 --- a/docs/_sources/data/cleaner.rst.txt +++ /dev/null @@ -1,5 +0,0 @@ -Data Cleaning --------------------- - -.. automodule:: data.cleaner - :members: \ No newline at end of file diff --git a/docs/_sources/encoder.rst.txt b/docs/_sources/encoder.rst.txt deleted file mode 100644 index 839e7f770..000000000 --- a/docs/_sources/encoder.rst.txt +++ /dev/null @@ -1,7 +0,0 @@ -:mod:`Encoders` -========================== - -Used for encoding data into PyTorch tensors and decoding it from pytorch tensors - -.. automodule:: encoder - :members: diff --git a/docs/_sources/ensemble.rst.txt b/docs/_sources/ensemble.rst.txt deleted file mode 100644 index 82c01f068..000000000 --- a/docs/_sources/ensemble.rst.txt +++ /dev/null @@ -1,7 +0,0 @@ -:mod:`Ensemble` -========================== - -Ensemble mixers together in order to generate predictions - -.. automodule:: ensemble - :members: \ No newline at end of file diff --git a/docs/_sources/helpers.rst.txt b/docs/_sources/helpers.rst.txt deleted file mode 100644 index 959ebb231..000000000 --- a/docs/_sources/helpers.rst.txt +++ /dev/null @@ -1,7 +0,0 @@ -:mod:`Helpers` -========================== - -Various helper functions - -.. automodule:: helpers - :members: \ No newline at end of file diff --git a/docs/_sources/index.rst.txt b/docs/_sources/index.rst.txt deleted file mode 100644 index e6ac21efb..000000000 --- a/docs/_sources/index.rst.txt +++ /dev/null @@ -1,277 +0,0 @@ -.. -*- coding: utf-8 -*- -.. lightwood_docs documentation master file, created by - sphinx-quickstart on Tue Sep 7 13:07:48 2021. - You can adapt this file completely to your liking, but it should at least - contain the root ``toctree`` directive. - -**************************************** -Welcome to Lightwood's Documentation! -**************************************** - -:Release: |release| -:Date: |today| -| -Lightwood is an AutoML framework that enables you to generate and customize machine learning pipelines declarative syntax called JSON-AI. - -Our goal is to make the data science/machine learning (DS/ML) life cycle easier by allowing users to focus on **what** they want to do their data without needing to write repetitive boilerplate code around machine learning and data preparation. Instead, we enable you to focus on the parts of a model that are truly unique and custom. - -Lightwood works with a variety of data types such as numbers, dates, categories, tags, text, arrays and various multimedia formats. 
These data types can be combined together to solve complex problems. We also support a time-series mode for problems that have between-row dependencies. - -Our JSON-AI syntax allows users to change any and all parts of the models Lightwood automatically generates. The syntax outlines the specifics details in each step of the modeling pipeline. Users may override default values (for example, changing the type of a column) or alternatively, entirely replace steps with their own methods (ex: use a random forest model for a predictor). Lightwood creates a "JSON-AI" object from this syntax which can then be used to automatically generate python code to represent your pipeline. - -For details as to how Lightwood works, check out the `Lightwood Philosophy `_ . - -Quick Guide -======================= -- :ref:`Installation ` -- :ref:`Example Use Cases ` -- :ref:`Contribute to Lightwood ` -- :ref:`Hacktoberfest 2021 ` - -Installation -============ - -You can install Lightwood as follows: - -.. code-block:: bash - - pip3 install lightwood - -.. note:: depending on your environment, you might have to use pip instead of pip3 in the above command. - -However, we recommend creating a python virtual environment. - -Setting up a dev environment -^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^ -- Clone lightwood -- Run ``cd lightwood && pip install requirements.txt`` -- Add it to your python path (e.g. by adding ``export PYTHONPATH='/where/you/cloned/lightwood':$PYTHONPATH`` as a newline at the end of your ``~/.bashrc`` file) -- Check that the unit-tests are passing by going into the directory where you cloned lightwood and running: ``python -m unittest discover tests`` - -.. warning:: If ``python`` default to python2.x on your environment use ``python3`` and ``pip3`` instead - -Currently, the preferred environment for working with lightwood is visual studio code, a very popular python IDE. However, any IDE should work. While we don't have guides for those, please feel free to use the following section as a template for VSCode, or to contribute your own tips and tricks to set up other IDEs. - -Setting up a VSCode environment -^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^ - -* Install and enable setting sync using github account (if you use multiple machines) -* Install pylance (for types) and make sure to disable pyright -* Go to ``Python > Lint: Enabled`` and disable everything *but* flake8 -* Set ``python.linting.flake8Path`` to the full path to flake8 (which flake8) -* Set ``Python › Formatting: Provider`` to autopep8 -* Add ``--global-config=/lightwood/.flake8`` and ``--experimental`` to ``Python › Formatting: Autopep8 Args`` -* Install live share and live share whiteboard - - -Example Use Cases -======================= - -Lightwood works with ``pandas.DataFrames``. Once a DataFrame is loaded, defined a "ProblemDefinition" via a dictionary. The only thing a user needs to specify is the name of the column to predict (via the key ``target``). - -Create a JSON-AI syntax from the command ``json_ai_from_problem``. Lightwood can then use this object to *automatically generate python code filling in the steps of the ML pipeline* via ``code_from_json_ai``. - -You can make a ``Predictor`` object, instantiated with that code via ``predictor_from_code``. - -To train a ``Predictor`` end-to-end, starting with unprocessed data, users can use the ``predictor.learn()`` command with the data. - -.. 
code-block:: python - - import pandas as pd - from lightwood.api.high_level import ( - ProblemDefinition, - json_ai_from_problem, - code_from_json_ai, - predictor_from_code, - ) - - # Load a pandas dataset - df = pd.read_csv( - "https://raw.githubusercontent.com/mindsdb/benchmarks/main/benchmarks/datasets/hdi/data.csv" - ) - - # Define the prediction task by naming the target column - pdef = ProblemDefinition.from_dict( - { - "target": "Development Index", # column you want to predict - } - ) - - # Generate JSON-AI code to model the problem - json_ai = json_ai_from_problem(df, problem_definition=pdef) - - # OPTIONAL - see the JSON-AI syntax - #print(json_ai.to_json()) - - # Generate python code - code = code_from_json_ai(json_ai) - - # OPTIONAL - see generated code - #print(code) - - # Create a predictor from python code - predictor = predictor_from_code(code) - - # Train a model end-to-end from raw data to a finalized predictor - predictor.learn(df) - - # Make the train/test splits and show predictions for a few examples - test_df = predictor.split(predictor.preprocess(df))["test"] - preds = predictor.predict(test).iloc[:10] - print(preds) - -BYOM: Bring your own models -^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^ - -Lightwood supports user architectures/approaches so long as you follow the abstractions provided within each step. - -Our `tutorials `_ provide specific use cases for how to introduce customization into your pipeline. Check out "custom cleaner", "custom splitter", "custom explainer", and "custom mixer". Stay tuned for further updates. - - -Contribute to Lightwood -======================= - -We love to receive contributions from the community and hear your opinions! We want to make contributing to Lightwood as easy as it can be. - -Being part of the core Lightwood team is possible to anyone who is motivated and wants to be part of that journey! - -Please continue reading this guide if you are interested in helping democratize machine learning. - -How can you help us? -^^^^^^^^^^^^^^^^^^^^^^^^ -* Report a bug -* Improve documentation -* Solve an issue -* Propose new features -* Discuss feature implementations -* Submit a bug fix -* Test Lightwood with your own data and let us know how it went! - -Code contributions -^^^^^^^^^^^^^^^^^^^^^^^^ -In general, we follow the `fork-and-pull `_ git workflow. Here are the steps: - -1. Fork the Lightwood repository -2. Checkout the ``staging`` branch, which is the development version that gets released weekly (there can be exceptions, but make sure to ask and confirm with us). -3. Make changes and commit them -4. Make sure that the CI tests pass. You can run the test suite locally with ``flake8 .`` to check style and ``python -m unittest discover tests`` to run the automated tests. This doesn't guarantee it will pass remotely since we run on multiple envs, but should work in most cases. -5. Push your local branch to your fork -6. Submit a pull request from your repo to the ``staging`` branch of ``mindsdb/lightwood`` so that we can review your changes. Be sure to merge the latest from staging before making a pull request! - -.. note:: You will need to sign a CLI agreement for the code since lightwood is under a GPL license. - - -Feature and Bug reports -^^^^^^^^^^^^^^^^^^^^^^^^ -We use GitHub issues to track bugs and features. Report them by opening a `new issue `_ and fill out all of the required inputs. - - -Code review process -^^^^^^^^^^^^^^^^^^^^^^^^^ -Pull request (PR) reviews are done on a regular basis. 
**If your PR does not address a previous issue, please make an issue first**. - -If your change has a chance to affecting performance we will run our private benchmark suite to validate it. - -Please, make sure you respond to our feedback/questions. - - -Community -^^^^^^^^^^^^^^^^^^^^^^^^^ -If you have additional questions or you want to chat with MindsDB core team, you can join our community: - -.. raw:: html - - - MindsDB Community - - -To get updates on Lightwood and MindsDB’s latest announcements, releases, and events, sign up for our `Monthly Community Newsletter `_. - -Join our mission of democratizing machine learning and allowing developers to become data scientists! - - -Hacktoberfest 2021 -======================= - -We are very excited that Lightwood is participating in this year's Hacktoberfest 2021 event. This month-long event through October gives you the chance to contribute to the Open Source codebase of Lightwood and MindsDB! - -The Lightwood core team has prepared several issues of different types that are ideal for first-time contributors and will be posted throughout the month. It's entirely up to you what you choose to work on and if you have your own great idea, feel free to suggest it by reaching out to us via our Slack community or by posting an issue with the `discussion` tag. - -**Our Major Incentive and SWAG!** - -Make contributions and enter into the draw for a `Deep Learning Laptop `_ **powered by the NVIDIA RTX 3080 Max-Q GPU**. Pre-installed with TensorFlow, PyTorch, CUDA, cuDNN and more. - -.. image:: _static/logos/laptop.jpeg - :align: center - :alt: Tensorbook by Lambda Labs - :width: 455 - :height: 400 - -Also, we’d love to send you a special MindsDB SWAG gift pack: - -.. image:: _static/logos/swag.png - :align: center - :alt: MindsDB Swag - -Please make sure to read the :ref:`contributions-guidelines ` first! - -How to participate -^^^^^^^^^^^^^^^^^^^^^^^^^^^ - -1. Contribute by making pull requests to any of our open issues labeled with the `hacktoberfest` tag during October. All hacktoberfest issues will specify how many points a successfully merged PR is worth. -2. Have a total score of at least 5 points in order to enter the big prize draw. -3. Complete the form with links to all your completed PR’s so we know where to ship the gift pack to! - -Entries close at midnight (PST) Sunday, 31 October 2021 with the prize draw winner announced at an online event on Monday, 1st of November. - - -Please check `MindsDB's hacktoberfest website `_ for more details. - -.. note:: if you wish to contribute with something that is *not currently flagged* as a hacktoberfest issue, make an issue (or make a comment if an issue already exists), and let one of the core Lightwood team researchers approve it. - - -Contributor Code of Conduct -^^^^^^^^^^^^^^^^^^^^^^^^^^^ -Please note that this project is released with a `Contributor Code of Conduct `_. By participating in this project, you agree to abide by its terms. - - -Current contributors -======================= - -.. raw:: html - - - - - - - - -License -======================= -.. raw:: html - - - PyPI - License - - -| `Lightwood License `_ - - - - - -Other Links -======================= -.. 
toctree:: - :maxdepth: 1 - - lightwood_philosophy - tutorials - api - data - encoder - mixer - ensemble - analysis - helpers \ No newline at end of file diff --git a/docs/_sources/lightwood_philosophy.rst.txt b/docs/_sources/lightwood_philosophy.rst.txt deleted file mode 100644 index 946f7ca3b..000000000 --- a/docs/_sources/lightwood_philosophy.rst.txt +++ /dev/null @@ -1,34 +0,0 @@ -:mod:`Lightwood Philosophy` -================================ - -Lightwood abstracts the ML pipeline into 3 core steps: - -1. Pre-processing and data cleaning -2. Feature engineering -3. Model building and training - -.. image:: _static/logos/lightwood.png - :align: center - :alt: Lightwood "under-the-hood" - -i) Pre-processing and cleaning -^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^ -For each column in your dataset, Lightwood will identify the suspected data type (numeric, categorical, etc.) via a brief statistical analysis. From this, it will generate a JSON-AI syntax. - -If the user keeps default behavior, Lightwood will perform a brief pre-processing approach to clean each column according to its identified data type. From there, it will split the data into train/dev/test splits. - -The `cleaner` and `splitter` objects respectively refer to the pre-processing and the data splitting functions. - -ii) Feature Engineering -^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^ -Data can be converted into features via "encoders". Encoders represent the rules for transforming pre-processed data into a numerical representations that a model can be used. - -Encoders can be **rule-based** or **learned**. A rule-based encoder transforms data per a specific set of instructions (ex: normalized numerical data) whereas a learned encoder produces a representation of the data after training (ex: a "\[CLS\]" token in a language model). - -Encoders are assigned to each column of data based on the data type; users can override this assignment either at the column-based level or at the data-type based level. Encoders inherit from the `BaseEncoder` class. - -iii) Model Building and Training -^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^ -We call a predictive model that intakes *encoded* feature data and outputs a prediction for the target of interest a `mixer` model. Users can either use Lightwood's default mixers or create their own approaches inherited from the `BaseMixer` class. - -We predominantly use PyTorch based approaches, but can support other models. \ No newline at end of file diff --git a/docs/_sources/mixer.rst.txt b/docs/_sources/mixer.rst.txt deleted file mode 100644 index b1c56f273..000000000 --- a/docs/_sources/mixer.rst.txt +++ /dev/null @@ -1,7 +0,0 @@ -:mod:`Mixers` -========================== - -Machine learning models which learn to predict the target value using the encoded representations. - -.. automodule:: mixer - :members: diff --git a/docs/_sources/tutorials.rst.txt b/docs/_sources/tutorials.rst.txt deleted file mode 100644 index 823b34d3d..000000000 --- a/docs/_sources/tutorials.rst.txt +++ /dev/null @@ -1,34 +0,0 @@ -:mod:`Tutorials` -========================== -.. toctree:: - :maxdepth: 1 - :caption: Table of Contents: - - -Getting started with Lightwood and JSON-AI ----------------------------------------------- -The following tutorial will walk you through a simple tabular dataset with JSON-AI. - -| How to use Lightwood for your data (Coming Soon!) 
-| `Lightwood for a quick data analysis `_ - - -Run models with more complex data types ------------------------------------------------- - -Below, you can see how Lightwood handles language and time-series data. - -| Using Language Models (Coming Soon!) -| Make your own timeseries predictor (Coming Soon!) - - -Bring your own custom methods ------------------------------------------------- -We support users bringing their custom methods. To learn how to build your own pipelines, check out the following notebooks: - -| `Construct a custom preprocessor to clean your data `_ -| `Make your own train and test split `_ -| `Create your own encoder to featurize your data `_ (Rule-based) -| Create your own encoder to featurize your data using a learned representation (Coming Soon!) -| `Design a custom mixer model `_ -| `Use your own model explainer `_ \ No newline at end of file diff --git a/docs/_sources/tutorials/custom_cleaner/custom_cleaner.ipynb.txt b/docs/_sources/tutorials/custom_cleaner/custom_cleaner.ipynb.txt deleted file mode 100644 index 93e1d01ca..000000000 --- a/docs/_sources/tutorials/custom_cleaner/custom_cleaner.ipynb.txt +++ /dev/null @@ -1,1290 +0,0 @@ -{ - "cells": [ - { - "cell_type": "markdown", - "id": "regulated-manufacturer", - "metadata": {}, - "source": [ - "## Using your own pre-processing methods in Lightwood\n", - "\n", - "#### Date: 2021.10.07\n", - "\n", - "For the notebook below, we'll be exploring how to make **custom pre-processing** methods for our data. Lightwood has standard cleaning protocols to handle a variety of different data types, however, we want users to feel comfortable augmenting and addressing their own changes. To do so, we'll highlight the approach we would take below:\n", - "\n", - "\n", - "We will use data from [Kaggle](https://www.kaggle.com/c/commonlitreadabilityprize/data?select=train.csv). \n", - "\n", - "The data has several columns, but ultimately aims to use text to predict a *readability score*. There are also some columns that I do not want to use when making predictions, such as `url_legal`, `license`, among others.\n", - "\n", - "In this tutorial, we're going to focus on making changes to 2 columns: \n", - "(1) **excerpt**, a text column, and ensuring we remove stop words using NLTK.
\n", - "(2) **target**, the goal to predict; we will make this explicitly non-negative.\n", - "\n", - "Note, for this ACTUAL challenge, negative and positive are meaningful. We are using this as an example dataset to demonstrate how you can make changes to your underlying dataset and proceed to building powerful predictors.\n", - "\n", - "Let's get started!" - ] - }, - { - "cell_type": "code", - "execution_count": 1, - "id": "happy-wheat", - "metadata": {}, - "outputs": [], - "source": [ - "import numpy as np\n", - "import pandas as pd\n", - "import torch\n", - "import nltk\n", - "\n", - "import os\n", - "import sys\n", - "\n", - "# Lightwood modules\n", - "import lightwood as lw\n", - "from lightwood import ProblemDefinition, \\\n", - " JsonAI, \\\n", - " json_ai_from_problem, \\\n", - " code_from_json_ai, \\\n", - " predictor_from_code" - ] - }, - { - "cell_type": "markdown", - "id": "indie-chaos", - "metadata": {}, - "source": [ - "### 1) Load your data\n", - "\n", - "Lightwood uses `pandas` in order to handle datasets, as this is a very standard package in datascience. We can load our dataset using pandas in the following manner (make sure your data is in the data folder!)" - ] - }, - { - "cell_type": "code", - "execution_count": 2, - "id": "recognized-parish", - "metadata": {}, - "outputs": [ - { - "data": { - "text/html": [ - "
\n", - "\n", - "\n", - " \n", - " \n", - " \n", - " \n", - " \n", - " \n", - " \n", - " \n", - " \n", - " \n", - " \n", - " \n", - " \n", - " \n", - " \n", - " \n", - " \n", - " \n", - " \n", - " \n", - " \n", - " \n", - " \n", - " \n", - " \n", - " \n", - " \n", - " \n", - " \n", - " \n", - " \n", - " \n", - " \n", - " \n", - " \n", - " \n", - " \n", - " \n", - " \n", - " \n", - " \n", - " \n", - " \n", - " \n", - " \n", - " \n", - " \n", - " \n", - " \n", - " \n", - " \n", - " \n", - " \n", - " \n", - " \n", - " \n", - " \n", - " \n", - "
idurl_legallicenseexcerpttargetstandard_error
0c12129c31NaNNaNWhen the young people returned to the ballroom...-0.3402590.464009
185aa80a4cNaNNaNAll through dinner time, Mrs. Fayre was somewh...-0.3153720.480805
2b69ac6792NaNNaNAs Roger had predicted, the snow departed as q...-0.5801180.476676
3dd1000b26NaNNaNAnd outside before the palace a great garden w...-1.0540130.450007
437c1b32fbNaNNaNOnce upon a time there were Three Bears who li...0.2471970.510845
\n", - "
" - ], - "text/plain": [ - " id url_legal license \\\n", - "0 c12129c31 NaN NaN \n", - "1 85aa80a4c NaN NaN \n", - "2 b69ac6792 NaN NaN \n", - "3 dd1000b26 NaN NaN \n", - "4 37c1b32fb NaN NaN \n", - "\n", - " excerpt target standard_error \n", - "0 When the young people returned to the ballroom... -0.340259 0.464009 \n", - "1 All through dinner time, Mrs. Fayre was somewh... -0.315372 0.480805 \n", - "2 As Roger had predicted, the snow departed as q... -0.580118 0.476676 \n", - "3 And outside before the palace a great garden w... -1.054013 0.450007 \n", - "4 Once upon a time there were Three Bears who li... 0.247197 0.510845 " - ] - }, - "execution_count": 2, - "metadata": {}, - "output_type": "execute_result" - } - ], - "source": [ - "# Load the data\n", - "ddir = \"data/\"\n", - "filename = os.path.join(ddir, \"train.csv.zip\")\n", - "\n", - "data = pd.read_csv(filename)\n", - "data.head()" - ] - }, - { - "cell_type": "markdown", - "id": "official-wright", - "metadata": {}, - "source": [ - "We see **6 columns**, a variety which are numerical, missing numbers, text, and identifiers or \"ids\". For our predictive task, we are only interested in 2 such columns, the **excerpt** and **target** columns.\n", - "\n", - "### 2) Create a JSON-AI default object\n", - "Before we create a custom cleaner object, let's first create JSON-AI syntax for our problem based on its specifications. We can do so by setting up a ``ProblemDefinition``. The ``ProblemDefinition`` allows us to specify the target, the column we intend to predict, along with other details. \n", - "\n", - "The end goal of JSON-AI is to provide **a set of instructions on how to compile a machine learning pipeline*.\n", - "\n", - "In this case, let's specify our target, the aptly named **target** column. We will also tell JSON-AI to throw away features we never intend to use, such as \"url_legal\", \"license\", and \"standard_error\". 
We can do so in the following lines:" - ] - }, - { - "cell_type": "code", - "execution_count": 3, - "id": "chicken-truth", - "metadata": {}, - "outputs": [ - { - "name": "stderr", - "output_type": "stream", - "text": [ - "INFO:lightwood-50752:Dropping features: ['url_legal', 'license', 'standard_error']\n", - "INFO:lightwood-50752:Analyzing a sample of 2478\n", - "INFO:lightwood-50752:from a total population of 2834, this is equivalent to 87.4% of your data.\n", - "INFO:lightwood-50752:Using 15 processes to deduct types.\n", - "INFO:lightwood-50752:Infering type for: id\n", - "INFO:lightwood-50752:Infering type for: target\n", - "INFO:lightwood-50752:Infering type for: excerpt\n", - "INFO:lightwood-50752:Column target has data type float\n", - "INFO:lightwood-50752:Doing text detection for column: id\n", - "INFO:lightwood-50752:Doing text detection for column: excerpt\n", - "INFO:lightwood-50752:Column id has data type categorical\n", - "WARNING:lightwood-50752:Column id is an identifier of type \"Hash-like identifier\"\n", - "INFO:lightwood-50752:Starting statistical analysis\n", - "INFO:lightwood-50752:Finished statistical analysis\n" - ] - }, - { - "name": "stdout", - "output_type": "stream", - "text": [ - "MyCustomCleaner.py\n", - "MyCustomCleaner\n", - "MyCustomSplitter.py\n", - "MyCustomSplitter\n" - ] - } - ], - "source": [ - "# Setup the problem definition\n", - "problem_definition = {\n", - " 'target': 'target',\n", - " \"ignore_features\": [\"url_legal\", \"license\", \"standard_error\"]\n", - "}\n", - "\n", - "# Generate the j{ai}son syntax\n", - "default_json = json_ai_from_problem(data, problem_definition)\n" - ] - }, - { - "cell_type": "markdown", - "id": "needed-flashing", - "metadata": {}, - "source": [ - "Lightwood, as it processes the data, will provide the user a few pieces of information.\n", - "\n", - "(1) It drops the features we specify in the `ignore_features` argument
\n", - "(2) It takes a small sample of data from each column to *automatically infer the data type*
\n", - "(3) For each column that was not ignored, it identifies the most likely data type.
\n", - "(4) It notices that \"ID\" is a hash-like-identifier.
\n", - "(5) It conducts a small statistical analysis on the distributions in order to generate syntax.
\n", - "\n", - "As soon as you request a JSON-AI object, Lightwood automatically creates functional syntax from your data. You can see it as follows: " - ] - }, - { - "cell_type": "code", - "execution_count": 4, - "id": "designed-condition", - "metadata": {}, - "outputs": [ - { - "name": "stdout", - "output_type": "stream", - "text": [ - "{\n", - " \"features\": {\n", - " \"excerpt\": {\n", - " \"encoder\": {\n", - " \"module\": \"Rich_Text.PretrainedLangEncoder\",\n", - " \"args\": {\n", - " \"output_type\": \"$dtype_dict[$target]\",\n", - " \"stop_after\": \"$problem_definition.seconds_per_encoder\"\n", - " }\n", - " }\n", - " }\n", - " },\n", - " \"outputs\": {\n", - " \"target\": {\n", - " \"data_dtype\": \"float\",\n", - " \"encoder\": {\n", - " \"module\": \"Float.NumericEncoder\",\n", - " \"args\": {\n", - " \"is_target\": \"True\",\n", - " \"positive_domain\": \"$statistical_analysis.positive_domain\"\n", - " }\n", - " },\n", - " \"mixers\": [\n", - " {\n", - " \"module\": \"Neural\",\n", - " \"args\": {\n", - " \"fit_on_dev\": true,\n", - " \"stop_after\": \"$problem_definition.seconds_per_mixer\",\n", - " \"search_hyperparameters\": true\n", - " }\n", - " },\n", - " {\n", - " \"module\": \"LightGBM\",\n", - " \"args\": {\n", - " \"stop_after\": \"$problem_definition.seconds_per_mixer\",\n", - " \"fit_on_dev\": true\n", - " }\n", - " },\n", - " {\n", - " \"module\": \"Regression\",\n", - " \"args\": {\n", - " \"stop_after\": \"$problem_definition.seconds_per_mixer\"\n", - " }\n", - " }\n", - " ],\n", - " \"ensemble\": {\n", - " \"module\": \"BestOf\",\n", - " \"args\": {\n", - " \"args\": \"$pred_args\",\n", - " \"accuracy_functions\": \"$accuracy_functions\",\n", - " \"ts_analysis\": null\n", - " }\n", - " }\n", - " }\n", - " },\n", - " \"problem_definition\": {\n", - " \"target\": \"target\",\n", - " \"pct_invalid\": 2,\n", - " \"unbias_target\": true,\n", - " \"seconds_per_mixer\": 1582,\n", - " \"seconds_per_encoder\": 12749,\n", - " \"time_aim\": 7780.458037514903,\n", - " \"target_weights\": null,\n", - " \"positive_domain\": false,\n", - " \"timeseries_settings\": {\n", - " \"is_timeseries\": false,\n", - " \"order_by\": null,\n", - " \"window\": null,\n", - " \"group_by\": null,\n", - " \"use_previous_target\": true,\n", - " \"nr_predictions\": null,\n", - " \"historical_columns\": null,\n", - " \"target_type\": \"\",\n", - " \"allow_incomplete_history\": false\n", - " },\n", - " \"anomaly_detection\": true,\n", - " \"ignore_features\": [\n", - " \"url_legal\",\n", - " \"license\",\n", - " \"standard_error\"\n", - " ],\n", - " \"fit_on_all\": true,\n", - " \"strict_mode\": true,\n", - " \"seed_nr\": 420\n", - " },\n", - " \"identifiers\": {\n", - " \"id\": \"Hash-like identifier\"\n", - " },\n", - " \"accuracy_functions\": [\n", - " \"r2_score\"\n", - " ]\n", - "}\n" - ] - } - ], - "source": [ - "print(default_json.to_json())" - ] - }, - { - "cell_type": "markdown", - "id": "level-vacation", - "metadata": {}, - "source": [ - "The above shows the minimal syntax required to create a functional JSON-AI object. For each feature you consider in the dataset, we specify the name of the feature, the type of encoder (feature-engineering method) to process the feature, and key word arguments to process the encoder. For the output, we perform a similar operation, but specify the types of mixers, or algorithms used in making a predictor that can estimate the target. 
Lastly, we populate the \"problem_definition\" key with the ingredients for our ML pipeline.\n", - "\n", - "These are the only elements required to get off the ground with JSON-AI. However, we're interested in making a *custom* approach. So, let's make this syntax a file, and introduce our own changes." - ] - }, - { - "cell_type": "code", - "execution_count": 5, - "id": "damaged-fluid", - "metadata": {}, - "outputs": [], - "source": [ - "with open(\"default.json\", \"w\") as fp:\n", - " fp.write(default_json.to_json())" - ] - }, - { - "cell_type": "markdown", - "id": "integrated-entrepreneur", - "metadata": {}, - "source": [ - "### 3) Build your own cleaner module\n", - "\n", - "Let's make a file called `MyCustomCleaner.py`. To write this file, we will use `lightwood.data.cleaner.cleaner` as inspiration.\n", - "\n", - "The goal output of the cleaner is to provide pre-processing to your dataset - the output is only a pandas DataFrame. In theory, any pre-processing can be done here. However, data can be highly irregular - our default `Cleaner` function has several main goals:\n", - "\n", - "(1) Strip away any identifier, etc. unwanted columns
\n", - "(2) Apply a cleaning function to each column in the dataset, according to that column's data type
\n", - "(3) Standardize NaN values within each column for appropriate downstream treatment
\n", - "\n", - "You can choose to omit many of these details and completely write this module from scratch, but the easiest way to introduce your custom changes is to borrow the `Cleaner` function, and add core changes in a custom block.\n", - "\n", - "This can be done as follows\n", - "\n", - "\n", - "You can see individual cleaning functions in `lightwood.data.cleaner`. If you want to entirely replace a cleaning technique given a particular data-type, we invite you to change `lightwood.data.cleaner.get_cleaning_func` using the argument `custom_cleaning_functions`; in this dictionary, for a datatype (specified in `api.dtype`), you can assign your own function to override our defaults. " - ] - }, - { - "cell_type": "markdown", - "id": "front-preview", - "metadata": {}, - "source": [ - "```\n", - "import re\n", - "from copy import deepcopy\n", - "\n", - "import numpy as np\n", - "import pandas as pd\n", - "\n", - "# For time-series\n", - "import datetime\n", - "from dateutil.parser import parse as parse_dt\n", - "\n", - "from lightwood.api.dtype import dtype\n", - "from lightwood.helpers import text\n", - "from lightwood.helpers.log import log\n", - "from lightwood.api.types import TimeseriesSettings\n", - "from lightwood.helpers.numeric import can_be_nan_numeric\n", - "\n", - "# Import NLTK for stopwords\n", - "import nltk\n", - "from nltk.corpus import stopwords\n", - "\n", - "stop_words = set(stopwords.words(\"english\"))\n", - "\n", - "from typing import Dict, List, Optional, Tuple, Callable, Union\n", - "\n", - "# Borrow functions from Lightwood's cleaner\n", - "from lightwood.data.cleaner import (\n", - " _remove_columns,\n", - " _get_columns_to_clean,\n", - " get_cleaning_func,\n", - ")\n", - "\n", - "# Use for standardizing NaNs\n", - "VALUES_FOR_NAN_AND_NONE_IN_PANDAS = [np.nan, \"nan\", \"NaN\", \"Nan\", \"None\"]\n", - "\n", - "\n", - "def cleaner(\n", - " data: pd.DataFrame,\n", - " dtype_dict: Dict[str, str],\n", - " identifiers: Dict[str, str],\n", - " target: str,\n", - " mode: str,\n", - " timeseries_settings: TimeseriesSettings,\n", - " anomaly_detection: bool,\n", - " custom_cleaning_functions: Dict[str, str] = {},\n", - ") -> pd.DataFrame:\n", - " \"\"\"\n", - " The cleaner is a function which takes in the raw data, plus additional information about it's types and about the problem. 
Based on this it generates a \"clean\" representation of the data, where each column has an ideal standardized type and all malformed or otherwise missing or invalid elements are turned into ``None``\n", - "\n", - " :param data: The raw data\n", - " :param dtype_dict: Type information for each column\n", - " :param identifiers: A dict containing all identifier typed columns\n", - " :param target: The target columns\n", - " :param mode: Can be \"predict\" or \"train\"\n", - " :param timeseries_settings: Timeseries related settings, only relevant for timeseries predictors, otherwise can be the default object\n", - " :param anomaly_detection: Are we detecting anomalies with this predictor?\n", - "\n", - " :returns: The cleaned data\n", - " \"\"\" # noqa\n", - "\n", - " data = _remove_columns(\n", - " data,\n", - " identifiers,\n", - " target,\n", - " mode,\n", - " timeseries_settings,\n", - " anomaly_detection,\n", - " dtype_dict,\n", - " )\n", - "\n", - " for col in _get_columns_to_clean(data, dtype_dict, mode, target):\n", - "\n", - " log.info(\"Cleaning column =\" + str(col))\n", - " # Get and apply a cleaning function for each data type\n", - " # If you want to customize the cleaner, it's likely you can to modify ``get_cleaning_func``\n", - " data[col] = data[col].apply(\n", - " get_cleaning_func(dtype_dict[col], custom_cleaning_functions)\n", - " )\n", - "\n", - " # ------------------------ #\n", - " # INTRODUCE YOUR CUSTOM BLOCK\n", - "\n", - " # If column data type is a text type, remove stop-words\n", - " if dtype_dict[col] in (dtype.rich_text, dtype.short_text):\n", - " data[col] = data[col].apply(\n", - " lambda x: \" \".join(\n", - " [word for word in x.split() if word not in stop_words]\n", - " )\n", - " )\n", - "\n", - " # Enforce numerical columns as non-negative\n", - " if dtype_dict[col] in (dtype.integer, dtype.float):\n", - " log.info(\"Converted \" + str(col) + \" into strictly non-negative\")\n", - " data[col] = data[col].apply(lambda x: x if x > 0 else 0.0)\n", - "\n", - " # ------------------------ #\n", - " data[col] = data[col].replace(\n", - " to_replace=VALUES_FOR_NAN_AND_NONE_IN_PANDAS, value=None\n", - " )\n", - "\n", - " return data\n", - "```" - ] - }, - { - "cell_type": "markdown", - "id": "radical-armenia", - "metadata": {}, - "source": [ - "#### Place your custom module in `~/lightwood_modules`\n", - "\n", - "We automatically search for custom scripts in your `~/lightwood_modules` path. Place your file there. Later, you'll see when we autogenerate code, that you can change your import location if you choose." - ] - }, - { - "cell_type": "markdown", - "id": "characteristic-promotion", - "metadata": {}, - "source": [ - "### 4) Introduce your custom cleaner in JSON-AI\n", - "\n", - "Now let's introduce our custom cleaner. JSON-AI keeps a lightweight syntax but fills in many default modules (like splitting, cleaning).\n", - "\n", - "For the custom cleaner, we'll work by editing the \"cleaner\" key. We will change properties within it as follows:\n", - "(1) \"module\" - place the name of the function. In our case it will be \"MyCustomCleaner.cleaner\"\n", - "(2) \"args\" - any keyword argument specific to your cleaner's internals. 
\n", - "\n", - "This will look as follows:\n", - "```\n", - " \"cleaner\": {\n", - " \"module\": \"MyCustomCleaner.cleaner\",\n", - " \"args\": {\n", - " \"identifiers\": \"$identifiers\",\n", - " \"data\": \"data\",\n", - " \"dtype_dict\": \"$dtype_dict\",\n", - " \"target\": \"$target\",\n", - " \"mode\": \"$mode\",\n", - " \"timeseries_settings\": \"$problem_definition.timeseries_settings\",\n", - " \"anomaly_detection\": \"$problem_definition.anomaly_detection\"\n", - " }\n", - "```\n", - "\n", - "You may be wondering what the \"$\" variables reference. In certain cases, we'd like JSON-AI to auto-fill internal variables when automatically generating code, for example, we've already specified the \"target\" - it would be easier to simply refer in a modular sense what that term is. That is what these variables represent.\n", - "\n", - "As we borrowed most of the default `Cleaner`; we keep these arguments. In theory, if we were writing much of these details from scratch, we can customize these values as necessary." - ] - }, - { - "cell_type": "markdown", - "id": "respiratory-radiation", - "metadata": {}, - "source": [ - "### 5) Generate Python code representing your ML pipeline\n", - "\n", - "Now we're ready to load up our custom JSON-AI and generate the predictor code!\n", - "\n", - "We can do this by first reading in our custom json-syntax, and then calling the function `code_from_json_ai`. " - ] - }, - { - "cell_type": "code", - "execution_count": 6, - "id": "floating-patent", - "metadata": {}, - "outputs": [ - { - "name": "stdout", - "output_type": "stream", - "text": [ - "MyCustomCleaner.py\n", - "MyCustomCleaner\n", - "MyCustomSplitter.py\n", - "MyCustomSplitter\n", - "import lightwood\n", - "from lightwood.analysis import *\n", - "from lightwood.api import *\n", - "from lightwood.data import *\n", - "from lightwood.encoder import *\n", - "from lightwood.ensemble import *\n", - "from lightwood.helpers.device import *\n", - "from lightwood.helpers.general import *\n", - "from lightwood.helpers.log import *\n", - "from lightwood.helpers.numeric import *\n", - "from lightwood.helpers.parallelism import *\n", - "from lightwood.helpers.seed import *\n", - "from lightwood.helpers.text import *\n", - "from lightwood.helpers.torch import *\n", - "from lightwood.mixer import *\n", - "import pandas as pd\n", - "from typing import Dict, List\n", - "import os\n", - "from types import ModuleType\n", - "import importlib.machinery\n", - "import sys\n", - "\n", - "\n", - "for import_dir in [os.path.expanduser(\"~/lightwood_modules\"), \"/etc/lightwood_modules\"]:\n", - " if os.path.exists(import_dir) and os.access(import_dir, os.R_OK):\n", - " for file_name in list(os.walk(import_dir))[0][2]:\n", - " print(file_name)\n", - " if file_name[-3:] != \".py\":\n", - " continue\n", - " mod_name = file_name[:-3]\n", - " print(mod_name)\n", - " loader = importlib.machinery.SourceFileLoader(\n", - " mod_name, os.path.join(import_dir, file_name)\n", - " )\n", - " module = ModuleType(loader.name)\n", - " loader.exec_module(module)\n", - " sys.modules[mod_name] = module\n", - " exec(f\"import {mod_name}\")\n", - "\n", - "\n", - "class Predictor(PredictorInterface):\n", - " target: str\n", - " mixers: List[BaseMixer]\n", - " encoders: Dict[str, BaseEncoder]\n", - " ensemble: BaseEnsemble\n", - " mode: str\n", - "\n", - " def __init__(self):\n", - " seed(420)\n", - " self.target = \"target\"\n", - " self.mode = \"inactive\"\n", - " self.problem_definition = ProblemDefinition.from_dict(\n", - " {\n", - " 
\"target\": \"target\",\n", - " \"pct_invalid\": 2,\n", - " \"unbias_target\": True,\n", - " \"seconds_per_mixer\": 1582,\n", - " \"seconds_per_encoder\": 12749,\n", - " \"time_aim\": 7780.458037514903,\n", - " \"target_weights\": None,\n", - " \"positive_domain\": False,\n", - " \"timeseries_settings\": {\n", - " \"is_timeseries\": False,\n", - " \"order_by\": None,\n", - " \"window\": None,\n", - " \"group_by\": None,\n", - " \"use_previous_target\": True,\n", - " \"nr_predictions\": None,\n", - " \"historical_columns\": None,\n", - " \"target_type\": \"\",\n", - " \"allow_incomplete_history\": False,\n", - " },\n", - " \"anomaly_detection\": True,\n", - " \"ignore_features\": [\"url_legal\", \"license\", \"standard_error\"],\n", - " \"fit_on_all\": True,\n", - " \"strict_mode\": True,\n", - " \"seed_nr\": 420,\n", - " }\n", - " )\n", - " self.accuracy_functions = [\"r2_score\"]\n", - " self.identifiers = {\"id\": \"Hash-like identifier\"}\n", - " self.dtype_dict = {\"target\": \"float\", \"excerpt\": \"rich_text\"}\n", - "\n", - " # Any feature-column dependencies\n", - " self.dependencies = {\"excerpt\": []}\n", - "\n", - " self.input_cols = [\"excerpt\"]\n", - "\n", - " # Initial stats analysis\n", - " self.statistical_analysis = None\n", - "\n", - " def analyze_data(self, data: pd.DataFrame) -> None:\n", - " # Perform a statistical analysis on the unprocessed data\n", - "\n", - " log.info(\"Performing statistical analysis on data\")\n", - " self.statistical_analysis = lightwood.data.statistical_analysis(\n", - " data,\n", - " self.dtype_dict,\n", - " {\"id\": \"Hash-like identifier\"},\n", - " self.problem_definition,\n", - " )\n", - "\n", - " # Instantiate post-training evaluation\n", - " self.analysis_blocks = [\n", - " ICP(\n", - " fixed_significance=None,\n", - " confidence_normalizer=False,\n", - " positive_domain=self.statistical_analysis.positive_domain,\n", - " ),\n", - " AccStats(deps=[\"ICP\"]),\n", - " GlobalFeatureImportance(disable_column_importance=False),\n", - " ]\n", - "\n", - " def preprocess(self, data: pd.DataFrame) -> pd.DataFrame:\n", - " # Preprocess and clean data\n", - "\n", - " log.info(\"Cleaning the data\")\n", - " data = MyCustomCleaner.cleaner(\n", - " data=data,\n", - " identifiers=self.identifiers,\n", - " dtype_dict=self.dtype_dict,\n", - " target=self.target,\n", - " mode=self.mode,\n", - " timeseries_settings=self.problem_definition.timeseries_settings,\n", - " anomaly_detection=self.problem_definition.anomaly_detection,\n", - " )\n", - "\n", - " # Time-series blocks\n", - "\n", - " return data\n", - "\n", - " def split(self, data: pd.DataFrame) -> Dict[str, pd.DataFrame]:\n", - " # Split the data into training/testing splits\n", - "\n", - " log.info(\"Splitting the data into train/test\")\n", - " train_test_data = splitter(\n", - " data=data,\n", - " seed=1,\n", - " pct_train=80,\n", - " pct_dev=10,\n", - " pct_test=10,\n", - " tss=self.problem_definition.timeseries_settings,\n", - " target=self.target,\n", - " dtype_dict=self.dtype_dict,\n", - " )\n", - "\n", - " return train_test_data\n", - "\n", - " def prepare(self, data: Dict[str, pd.DataFrame]) -> None:\n", - " # Prepare encoders to featurize data\n", - "\n", - " self.mode = \"train\"\n", - "\n", - " if self.statistical_analysis is None:\n", - " raise Exception(\"Please run analyze_data first\")\n", - "\n", - " # Column to encoder mapping\n", - " self.encoders = {\n", - " \"target\": Float.NumericEncoder(\n", - " is_target=True,\n", - " 
positive_domain=self.statistical_analysis.positive_domain,\n", - " ),\n", - " \"excerpt\": Rich_Text.PretrainedLangEncoder(\n", - " output_type=False,\n", - " stop_after=self.problem_definition.seconds_per_encoder,\n", - " ),\n", - " }\n", - "\n", - " # Prepare the training + dev data\n", - " concatenated_train_dev = pd.concat([data[\"train\"], data[\"dev\"]])\n", - "\n", - " log.info(\"Preparing the encoders\")\n", - "\n", - " encoder_prepping_dict = {}\n", - "\n", - " # Prepare encoders that do not require learned strategies\n", - " for col_name, encoder in self.encoders.items():\n", - " if not encoder.is_trainable_encoder:\n", - " encoder_prepping_dict[col_name] = [\n", - " encoder,\n", - " concatenated_train_dev[col_name],\n", - " \"prepare\",\n", - " ]\n", - " log.info(\n", - " f\"Encoder prepping dict length of: {len(encoder_prepping_dict)}\"\n", - " )\n", - "\n", - " # Setup parallelization\n", - " parallel_prepped_encoders = mut_method_call(encoder_prepping_dict)\n", - " for col_name, encoder in parallel_prepped_encoders.items():\n", - " self.encoders[col_name] = encoder\n", - "\n", - " # Prepare the target\n", - " if self.target not in parallel_prepped_encoders:\n", - " if self.encoders[self.target].is_trainable_encoder:\n", - " self.encoders[self.target].prepare(\n", - " data[\"train\"][self.target], data[\"dev\"][self.target]\n", - " )\n", - " else:\n", - " self.encoders[self.target].prepare(\n", - " pd.concat([data[\"train\"], data[\"dev\"]])[self.target]\n", - " )\n", - "\n", - " # Prepare any non-target encoders that are learned\n", - " for col_name, encoder in self.encoders.items():\n", - " if encoder.is_trainable_encoder:\n", - " priming_data = pd.concat([data[\"train\"], data[\"dev\"]])\n", - " kwargs = {}\n", - " if self.dependencies[col_name]:\n", - " kwargs[\"dependency_data\"] = {}\n", - " for col in self.dependencies[col_name]:\n", - " kwargs[\"dependency_data\"][col] = {\n", - " \"original_type\": self.dtype_dict[col],\n", - " \"data\": priming_data[col],\n", - " }\n", - "\n", - " # If an encoder representation requires the target, provide priming data\n", - " if hasattr(encoder, \"uses_target\"):\n", - " kwargs[\"encoded_target_values\"] = parallel_prepped_encoders[\n", - " self.target\n", - " ].encode(priming_data[self.target])\n", - "\n", - " encoder.prepare(\n", - " data[\"train\"][col_name], data[\"dev\"][col_name], **kwargs\n", - " )\n", - "\n", - " def featurize(self, split_data: Dict[str, pd.DataFrame]):\n", - " # Featurize data into numerical representations for models\n", - "\n", - " log.info(\"Featurizing the data\")\n", - " feature_data = {key: None for key in split_data.keys()}\n", - "\n", - " for key, data in split_data.items():\n", - " feature_data[key] = EncodedDs(self.encoders, data, self.target)\n", - "\n", - " return feature_data\n", - "\n", - " def fit(self, enc_data: Dict[str, pd.DataFrame]) -> None:\n", - " # Fit predictors to estimate target\n", - "\n", - " self.mode = \"train\"\n", - "\n", - " # --------------- #\n", - " # Extract data\n", - " # --------------- #\n", - " # Extract the featurized data into train/dev/test\n", - " encoded_train_data = enc_data[\"train\"]\n", - " encoded_dev_data = enc_data[\"dev\"]\n", - " encoded_test_data = enc_data[\"test\"]\n", - "\n", - " log.info(\"Training the mixers\")\n", - "\n", - " # --------------- #\n", - " # Fit Models\n", - " # --------------- #\n", - " # Assign list of mixers\n", - " self.mixers = [\n", - " Neural(\n", - " fit_on_dev=True,\n", - " search_hyperparameters=True,\n", - " 
net=\"DefaultNet\",\n", - " stop_after=self.problem_definition.seconds_per_mixer,\n", - " target_encoder=self.encoders[self.target],\n", - " target=self.target,\n", - " dtype_dict=self.dtype_dict,\n", - " input_cols=self.input_cols,\n", - " timeseries_settings=self.problem_definition.timeseries_settings,\n", - " ),\n", - " LightGBM(\n", - " fit_on_dev=True,\n", - " stop_after=self.problem_definition.seconds_per_mixer,\n", - " target=self.target,\n", - " dtype_dict=self.dtype_dict,\n", - " input_cols=self.input_cols,\n", - " ),\n", - " Regression(\n", - " stop_after=self.problem_definition.seconds_per_mixer,\n", - " target=self.target,\n", - " dtype_dict=self.dtype_dict,\n", - " target_encoder=self.encoders[self.target],\n", - " ),\n", - " ]\n", - "\n", - " # Train mixers\n", - " trained_mixers = []\n", - " for mixer in self.mixers:\n", - " try:\n", - " mixer.fit(encoded_train_data, encoded_dev_data)\n", - " trained_mixers.append(mixer)\n", - " except Exception as e:\n", - " log.warning(f\"Exception: {e} when training mixer: {mixer}\")\n", - " if True and mixer.stable:\n", - " raise e\n", - "\n", - " # Update mixers to trained versions\n", - " self.mixers = trained_mixers\n", - "\n", - " # --------------- #\n", - " # Create Ensembles\n", - " # --------------- #\n", - " log.info(\"Ensembling the mixer\")\n", - " # Create an ensemble of mixers to identify best performing model\n", - " self.pred_args = PredictionArguments()\n", - " self.ensemble = BestOf(\n", - " ts_analysis=None,\n", - " data=encoded_test_data,\n", - " accuracy_functions=self.accuracy_functions,\n", - " target=self.target,\n", - " mixers=self.mixers,\n", - " )\n", - " self.supports_proba = self.ensemble.supports_proba\n", - "\n", - " def analyze_ensemble(self, enc_data: Dict[str, pd.DataFrame]) -> None:\n", - " # Evaluate quality of fit for the ensemble of mixers\n", - "\n", - " # --------------- #\n", - " # Extract data\n", - " # --------------- #\n", - " # Extract the featurized data into train/dev/test\n", - " encoded_train_data = enc_data[\"train\"]\n", - " encoded_dev_data = enc_data[\"dev\"]\n", - " encoded_test_data = enc_data[\"test\"]\n", - "\n", - " # --------------- #\n", - " # Analyze Ensembles\n", - " # --------------- #\n", - " log.info(\"Analyzing the ensemble of mixers\")\n", - " self.model_analysis, self.runtime_analyzer = model_analyzer(\n", - " data=encoded_test_data,\n", - " train_data=encoded_train_data,\n", - " stats_info=self.statistical_analysis,\n", - " ts_cfg=self.problem_definition.timeseries_settings,\n", - " accuracy_functions=self.accuracy_functions,\n", - " predictor=self.ensemble,\n", - " target=self.target,\n", - " dtype_dict=self.dtype_dict,\n", - " analysis_blocks=self.analysis_blocks,\n", - " )\n", - "\n", - " def learn(self, data: pd.DataFrame) -> None:\n", - " log.info(f\"Dropping features: {self.problem_definition.ignore_features}\")\n", - " data = data.drop(\n", - " columns=self.problem_definition.ignore_features, errors=\"ignore\"\n", - " )\n", - "\n", - " self.mode = \"train\"\n", - "\n", - " # Perform stats analysis\n", - " self.analyze_data(data)\n", - "\n", - " # Pre-process the data\n", - " clean_data = self.preprocess(data)\n", - "\n", - " # Create train/test (dev) split\n", - " train_dev_test = self.split(clean_data)\n", - "\n", - " # Prepare encoders\n", - " self.prepare(train_dev_test)\n", - "\n", - " # Create feature vectors from data\n", - " enc_train_test = self.featurize(train_dev_test)\n", - "\n", - " # Prepare mixers\n", - " self.fit(enc_train_test)\n", - "\n", - " # 
Analyze the ensemble\n", - " self.analyze_ensemble(enc_train_test)\n", - "\n", - " # ------------------------ #\n", - " # Enable model partial fit AFTER it is trained and evaluated for performance with the appropriate train/dev/test splits.\n", - " # This assumes the predictor could continuously evolve, hence including reserved testing data may improve predictions.\n", - " # SET `json_ai.problem_definition.fit_on_all=False` TO TURN THIS BLOCK OFF.\n", - "\n", - " # Update the mixers with partial fit\n", - " if self.problem_definition.fit_on_all:\n", - "\n", - " log.info(\"Adjustment on validation requested.\")\n", - " update_data = {\n", - " \"new\": enc_train_test[\"test\"],\n", - " \"old\": ConcatedEncodedDs(\n", - " [enc_train_test[\"train\"], enc_train_test[\"dev\"]]\n", - " ),\n", - " } # noqa\n", - "\n", - " self.adjust(update_data)\n", - "\n", - " def adjust(self, new_data: Dict[str, pd.DataFrame]) -> None:\n", - " # Update mixers with new information\n", - "\n", - " self.mode = \"train\"\n", - "\n", - " # --------------- #\n", - " # Extract data\n", - " # --------------- #\n", - " # Extract the featurized data\n", - " encoded_old_data = new_data[\"old\"]\n", - " encoded_new_data = new_data[\"new\"]\n", - "\n", - " # --------------- #\n", - " # Adjust (Update) Mixers\n", - " # --------------- #\n", - " log.info(\"Updating the mixers\")\n", - "\n", - " for mixer in self.mixers:\n", - " mixer.partial_fit(encoded_new_data, encoded_old_data)\n", - "\n", - " def predict(self, data: pd.DataFrame, args: Dict = {}) -> pd.DataFrame:\n", - "\n", - " # Remove columns that user specifies to ignore\n", - " log.info(f\"Dropping features: {self.problem_definition.ignore_features}\")\n", - " data = data.drop(\n", - " columns=self.problem_definition.ignore_features, errors=\"ignore\"\n", - " )\n", - " for col in self.input_cols:\n", - " if col not in data.columns:\n", - " data[col] = [None] * len(data)\n", - "\n", - " # Clean the data\n", - " self.mode = \"predict\"\n", - " log.info(\"Cleaning the data\")\n", - " data = MyCustomCleaner.cleaner(\n", - " data=data,\n", - " identifiers=self.identifiers,\n", - " dtype_dict=self.dtype_dict,\n", - " target=self.target,\n", - " mode=self.mode,\n", - " timeseries_settings=self.problem_definition.timeseries_settings,\n", - " anomaly_detection=self.problem_definition.anomaly_detection,\n", - " )\n", - "\n", - " # Featurize the data\n", - " encoded_ds = EncodedDs(self.encoders, data, self.target)\n", - " encoded_data = encoded_ds.get_encoded_data(include_target=False)\n", - "\n", - " self.pred_args = PredictionArguments.from_dict(args)\n", - " df = self.ensemble(encoded_ds, args=self.pred_args)\n", - "\n", - " if self.pred_args.all_mixers:\n", - " return df\n", - " else:\n", - " insights, global_insights = explain(\n", - " data=data,\n", - " encoded_data=encoded_data,\n", - " predictions=df,\n", - " ts_analysis=None,\n", - " timeseries_settings=self.problem_definition.timeseries_settings,\n", - " positive_domain=self.statistical_analysis.positive_domain,\n", - " anomaly_detection=self.problem_definition.anomaly_detection,\n", - " analysis=self.runtime_analyzer,\n", - " target_name=self.target,\n", - " target_dtype=self.dtype_dict[self.target],\n", - " explainer_blocks=self.analysis_blocks,\n", - " fixed_confidence=self.pred_args.fixed_confidence,\n", - " anomaly_error_rate=self.pred_args.anomaly_error_rate,\n", - " anomaly_cooldown=self.pred_args.anomaly_cooldown,\n", - " )\n", - " return insights\n", - "\n" - ] - } - ], - "source": [ - "# Make changes to your 
JSON-file and load the custom version\n", - "with open('custom.json', 'r') as fp:\n", - " modified_json = JsonAI.from_json(fp.read())\n", - "\n", - "#Generate python code that fills in your pipeline\n", - "code = code_from_json_ai(modified_json)\n", - "\n", - "print(code)\n", - "\n", - "# Save code to a file (Optional)\n", - "with open('custom_cleaner_pipeline.py', 'w') as fp:\n", - " fp.write(code)" - ] - }, - { - "cell_type": "markdown", - "id": "handled-oasis", - "metadata": {}, - "source": [ - "As you can see, an end-to-end pipeline of our entire ML procedure has been generated. Several abstracted functions provide transparency into the processes your data goes through in order to build these models.\n", - "\n", - "The key steps of the pipeline are as follows:\n", - "\n", - "(1) Run a **statistical analysis** with `analyze_data`
\n", - "(2) Clean your data with `preprocess`
\n", - "(3) Make a training/dev/testing split with `split`
\n", - "(4) Prepare your feature-engineering pipelines with `prepare`
\n", - "(5) Create your features with `featurize`
\n", - "(6) Fit your predictor models with `fit`
\n", - "\n", - "You can customize this further if necessary, but you have all the steps necessary to train a model!\n", - "\n", - "We recommend familiarizing with these steps by calling the above commands, ideally in order. Some commands (namely `prepare`, `featurize`, and `fit`) do depend on other steps.\n", - "\n", - "If you want to omit the individual steps, we recommend your simply call the `learn` method, which compiles all the necessary steps implemented to give your fully trained predictive models starting with unprocessed data! " - ] - }, - { - "cell_type": "markdown", - "id": "meaning-saskatchewan", - "metadata": {}, - "source": [ - "### 6) Call python to run your code and see your preprocessed outputs\n", - "\n", - "Once we have code, we can turn this into a python object by calling `predictor_from_code`. This instantiates the `PredictorInterface` object. \n", - "\n", - "This predictor object can be then used to run your pipeline." - ] - }, - { - "cell_type": "code", - "execution_count": 7, - "id": "violent-guard", - "metadata": {}, - "outputs": [ - { - "name": "stdout", - "output_type": "stream", - "text": [ - "MyCustomCleaner.py\n", - "MyCustomCleaner\n", - "MyCustomSplitter.py\n", - "MyCustomSplitter\n" - ] - } - ], - "source": [ - "# Turn the code above into a predictor object\n", - "predictor = predictor_from_code(code)" - ] - }, - { - "cell_type": "code", - "execution_count": 8, - "id": "closing-episode", - "metadata": {}, - "outputs": [ - { - "name": "stderr", - "output_type": "stream", - "text": [ - "INFO:lightwood-50752:Cleaning the data\n", - "INFO:lightwood-50752:Cleaning column =target\n", - "INFO:lightwood-50752:Converted target into strictly non-negative\n", - "INFO:lightwood-50752:Cleaning column =excerpt\n" - ] - }, - { - "data": { - "text/html": [ - "
\n", - "\n", - "\n", - " \n", - " \n", - " \n", - " \n", - " \n", - " \n", - " \n", - " \n", - " \n", - " \n", - " \n", - " \n", - " \n", - " \n", - " \n", - " \n", - " \n", - " \n", - " \n", - " \n", - " \n", - " \n", - " \n", - " \n", - " \n", - " \n", - " \n", - " \n", - " \n", - " \n", - " \n", - " \n", - " \n", - " \n", - "
excerpttarget
0When young people returned ballroom, presented...0.000000
1All dinner time, Mrs. Fayre somewhat silent, e...0.000000
2As Roger predicted, snow departed quickly came...0.000000
3And outside palace great garden walled round, ...0.000000
4Once upon time Three Bears lived together hous...0.247197
\n", - "
" - ], - "text/plain": [ - " excerpt target\n", - "0 When young people returned ballroom, presented... 0.000000\n", - "1 All dinner time, Mrs. Fayre somewhat silent, e... 0.000000\n", - "2 As Roger predicted, snow departed quickly came... 0.000000\n", - "3 And outside palace great garden walled round, ... 0.000000\n", - "4 Once upon time Three Bears lived together hous... 0.247197" - ] - }, - "execution_count": 8, - "metadata": {}, - "output_type": "execute_result" - } - ], - "source": [ - "# Pre-process the data\n", - "cleaned_data = predictor.preprocess(data)\n", - "\n", - "cleaned_data.head()" - ] - }, - { - "cell_type": "code", - "execution_count": 9, - "id": "major-stake", - "metadata": {}, - "outputs": [ - { - "name": "stdout", - "output_type": "stream", - "text": [ - "\u001b[1mOriginal Data\n", - "\u001b[0m\n", - "Excerpt:\n", - " When the young people returned to the ballroom, it presented a decidedly changed appearance. Instead of an interior scene, it was a winter landscape.\n", - "The floor was covered with snow-white canvas, not laid on smoothly, but rumpled over bumps and hillocks, like a real snow field. The numerous palms and evergreens that had decorated the room, were powdered with flour and strewn with tufts of cotton, like snow. Also diamond dust had been lightly sprinkled on them, and glittering crystal icicles hung from the branches.\n", - "At each end of the room, on the wall, hung a beautiful bear-skin rug.\n", - "These rugs were for prizes, one for the girls and one for the boys. And this was the game.\n", - "The girls were gathered at one end of the room and the boys at the other, and one end was called the North Pole, and the other the South Pole. Each player was given a small flag which they were to plant on reaching the Pole.\n", - "This would have been an easy matter, but each traveller was obliged to wear snowshoes.\n", - "\n", - "Target:\n", - " -0.340259125\n", - "\u001b[1m\n", - "\n", - "Cleaned Data\n", - "\u001b[0m\n", - "Excerpt:\n", - " When young people returned ballroom, presented decidedly changed appearance. Instead interior scene, winter landscape. The floor covered snow-white canvas, laid smoothly, rumpled bumps hillocks, like real snow field. The numerous palms evergreens decorated room, powdered flour strewn tufts cotton, like snow. Also diamond dust lightly sprinkled them, glittering crystal icicles hung branches. At end room, wall, hung beautiful bear-skin rug. These rugs prizes, one girls one boys. And game. The girls gathered one end room boys other, one end called North Pole, South Pole. Each player given small flag plant reaching Pole. This would easy matter, traveller obliged wear snowshoes.\n", - "\n", - "Target:\n", - " 0.0\n" - ] - } - ], - "source": [ - "print(\"\\033[1m\" + \"Original Data\\n\" + \"\\033[0m\")\n", - "print(\"Excerpt:\\n\", data.iloc[0][\"excerpt\"])\n", - "print(\"\\nTarget:\\n\", data.iloc[0][\"target\"])\n", - "\n", - "print(\"\\033[1m\" + \"\\n\\nCleaned Data\\n\" + \"\\033[0m\")\n", - "print(\"Excerpt:\\n\", cleaned_data.iloc[0][\"excerpt\"])\n", - "print(\"\\nTarget:\\n\", cleaned_data.iloc[0][\"target\"])" - ] - }, - { - "cell_type": "markdown", - "id": "celtic-scientist", - "metadata": {}, - "source": [ - "As you can see, the cleaning-process we introduced cut out the stop-words from the Excerpt, and enforced the target data to stay positive.\n", - "\n", - "We hope this tutorial was informative on how to introduce a **custom preprocessing method** to your datasets! 
For more customization tutorials, please check our [documentation](https://lightwood.io/tutorials.html).\n", - "\n", - "If you want to download the Jupyter-notebook version of this tutorial, check out the source github location found here: `lightwood/docssrc/source/tutorials/custom_cleaner`. " - ] - } - ], - "metadata": { - "kernelspec": { - "display_name": "mdb", - "language": "python", - "name": "mdb" - }, - "language_info": { - "codemirror_mode": { - "name": "ipython", - "version": 3 - }, - "file_extension": ".py", - "mimetype": "text/x-python", - "name": "python", - "nbconvert_exporter": "python", - "pygments_lexer": "ipython3", - "version": "3.8.10" - } - }, - "nbformat": 4, - "nbformat_minor": 5 -} diff --git a/docs/_sources/tutorials/custom_encoder_rulebased/custom_encoder_rulebased.ipynb.txt b/docs/_sources/tutorials/custom_encoder_rulebased/custom_encoder_rulebased.ipynb.txt deleted file mode 100644 index b708714d7..000000000 --- a/docs/_sources/tutorials/custom_encoder_rulebased/custom_encoder_rulebased.ipynb.txt +++ /dev/null @@ -1,887 +0,0 @@ -{ - "cells": [ - { - "cell_type": "markdown", - "id": "smooth-philip", - "metadata": {}, - "source": [ - "### Custom Encoder: Rule-Based\n", - "\n", - "Lightwood uses \"Encoders\" to convert preprocessed (cleaned) data into **features**. Encoders represent the **feature engineering** step of the data science pipeline; they can either have a set of instructions (\"rule-based\") or a learned representation (trained on data).\n", - "\n", - "In the following notebook, we will experiment with creating a custom encoder that creates **Label Encoding**. \n", - "\n", - "For example, imagine we have the following set of categories:\n", - "\n", - "```\n", - "MyColumnData = [\"apple\", \"orange\", \"orange\", \"banana\", \"apple\", \"dragonfruit\"]\n", - "```\n", - "\n", - "There are 4 categories to consider: \"apple\", \"banana\", \"orange\", and \"dragonfruit\".\n", - "\n", - "**Label encoding** allows you to refer to these categories as if they were numbers. For example, consider the mapping (arranged alphabetically):\n", - "\n", - "1 - apple
\n", - "2 - banana
\n", - "3 - dragonfruit
\n", - "4 - orange
\n", - "\n", - "Using this mapping, we can convert the above data as follows:\n", - "\n", - "```\n", - "MyFeatureData = [1, 4, 4, 2, 1, 3]\n", - "```\n", - "\n", - "In the following notebook, we will design a **LabelEncoder** for Lightwood for use on categorical data. We will be using the Kaggle \"Used Car\" [dataset](https://www.kaggle.com/adityadesai13/used-car-dataset-ford-and-mercedes). We've provided a link for you to automatically access this CSV. This dataset describes various details of cars on sale - with the goal of predicting how much this car may sell for.\n", - "\n", - "Let's get started." - ] - }, - { - "cell_type": "code", - "execution_count": 1, - "id": "raising-adventure", - "metadata": {}, - "outputs": [], - "source": [ - "import pandas as pd\n", - "\n", - "# Lightwood modules\n", - "import lightwood as lw\n", - "from lightwood import ProblemDefinition, \\\n", - " JsonAI, \\\n", - " json_ai_from_problem, \\\n", - " code_from_json_ai, \\\n", - " predictor_from_code" - ] - }, - { - "cell_type": "markdown", - "id": "instant-income", - "metadata": {}, - "source": [ - "### 1) Load your data\n", - "\n", - "Lightwood works with `pandas.DataFrame`s; load data via pandas as follows:" - ] - }, - { - "cell_type": "code", - "execution_count": 2, - "id": "technical-government", - "metadata": {}, - "outputs": [ - { - "data": { - "text/html": [ - "
\n", - "\n", - "\n", - " \n", - " \n", - " \n", - " \n", - " \n", - " \n", - " \n", - " \n", - " \n", - " \n", - " \n", - " \n", - " \n", - " \n", - " \n", - " \n", - " \n", - " \n", - " \n", - " \n", - " \n", - " \n", - " \n", - " \n", - " \n", - " \n", - " \n", - " \n", - " \n", - " \n", - " \n", - " \n", - " \n", - " \n", - " \n", - " \n", - " \n", - " \n", - " \n", - " \n", - " \n", - " \n", - " \n", - " \n", - " \n", - " \n", - " \n", - " \n", - " \n", - " \n", - " \n", - " \n", - " \n", - " \n", - " \n", - " \n", - " \n", - " \n", - " \n", - " \n", - " \n", - " \n", - " \n", - " \n", - " \n", - " \n", - " \n", - " \n", - " \n", - " \n", - " \n", - " \n", - " \n", - " \n", - " \n", - " \n", - "
modelyearpricetransmissionmileagefuelTypetaxmpgengineSize
0A1201712500Manual15735Petrol15055.41.4
1A6201616500Automatic36203Diesel2064.22.0
2A1201611000Manual29946Petrol3055.41.4
3A4201716800Automatic25952Diesel14567.32.0
4A3201917300Manual1998Petrol14549.61.0
\n", - "
" - ], - "text/plain": [ - " model year price transmission mileage fuelType tax mpg engineSize\n", - "0 A1 2017 12500 Manual 15735 Petrol 150 55.4 1.4\n", - "1 A6 2016 16500 Automatic 36203 Diesel 20 64.2 2.0\n", - "2 A1 2016 11000 Manual 29946 Petrol 30 55.4 1.4\n", - "3 A4 2017 16800 Automatic 25952 Diesel 145 67.3 2.0\n", - "4 A3 2019 17300 Manual 1998 Petrol 145 49.6 1.0" - ] - }, - "execution_count": 2, - "metadata": {}, - "output_type": "execute_result" - } - ], - "source": [ - "filename = 'https://raw.githubusercontent.com/mindsdb/benchmarks/main/benchmarks/datasets/used_car_price/data.csv'\n", - "df = pd.read_csv(filename)\n", - "df.head()" - ] - }, - { - "cell_type": "markdown", - "id": "anonymous-rainbow", - "metadata": {}, - "source": [ - "We can see a handful of columns above, such as `model, year, price, transmission, mileage, fuelType, tax, mpg, engineSize`. Some columns are numerical whereas others are categorical. We are going to specifically only focus on categorical columns.\n", - "\n", - "\n", - "### 2) Generate JSON-AI Syntax\n", - "\n", - "We will make a `LabelEncoder` as follows:\n", - "\n", - "(1) Find all unique examples within a column
\n", - "(2) Order the examples in a consistent way
\n", - "(3) Label (python-index of 0 as start) each category
\n", - "(4) Assign the label according to each datapoint.
\n", - "\n", - "First, let's generate a JSON-AI syntax so we can automatically identify each column. " - ] - }, - { - "cell_type": "code", - "execution_count": 3, - "id": "absent-maker", - "metadata": {}, - "outputs": [ - { - "name": "stderr", - "output_type": "stream", - "text": [ - "\u001b[32mINFO:lightwood-53258:Dropping features: []\u001b[0m\n", - "\u001b[32mINFO:lightwood-53258:Analyzing a sample of 6920\u001b[0m\n", - "\u001b[32mINFO:lightwood-53258:from a total population of 10668, this is equivalent to 64.9% of your data.\u001b[0m\n", - "\u001b[32mINFO:lightwood-53258:Using 15 processes to deduct types.\u001b[0m\n", - "\u001b[32mINFO:lightwood-53258:Infering type for: year\u001b[0m\n", - "\u001b[32mINFO:lightwood-53258:Infering type for: model\u001b[0m\n", - "\u001b[32mINFO:lightwood-53258:Infering type for: price\u001b[0m\n", - "\u001b[32mINFO:lightwood-53258:Infering type for: mileage\u001b[0m\n", - "\u001b[32mINFO:lightwood-53258:Infering type for: transmission\u001b[0m\n", - "\u001b[32mINFO:lightwood-53258:Infering type for: fuelType\u001b[0m\n", - "\u001b[32mINFO:lightwood-53258:Infering type for: tax\u001b[0m\n", - "\u001b[32mINFO:lightwood-53258:Infering type for: mpg\u001b[0m\n", - "\u001b[32mINFO:lightwood-53258:Infering type for: engineSize\u001b[0m\n", - "\u001b[32mINFO:lightwood-53258:Column year has data type integer\u001b[0m\n", - "\u001b[32mINFO:lightwood-53258:Column tax has data type integer\u001b[0m\n", - "\u001b[32mINFO:lightwood-53258:Column price has data type integer\u001b[0m\n", - "\u001b[32mINFO:lightwood-53258:Column mileage has data type integer\u001b[0m\n", - "\u001b[32mINFO:lightwood-53258:Column engineSize has data type float\u001b[0m\n", - "\u001b[32mINFO:lightwood-53258:Column mpg has data type float\u001b[0m\n", - "\u001b[32mINFO:lightwood-53258:Column fuelType has data type categorical\u001b[0m\n", - "\u001b[32mINFO:lightwood-53258:Column transmission has data type categorical\u001b[0m\n", - "\u001b[32mINFO:lightwood-53258:Column model has data type categorical\u001b[0m\n", - "\u001b[32mINFO:lightwood-53258:Starting statistical analysis\u001b[0m\n", - "\u001b[32mINFO:lightwood-53258:Finished statistical analysis\u001b[0m\n" - ] - } - ], - "source": [ - "# Create the Problem Definition\n", - "pdef = ProblemDefinition.from_dict({\n", - " 'target': 'price', # column you want to predict\n", - " #'ignore_features': ['year', 'mileage', 'tax', 'mpg', 'engineSize']\n", - "})\n", - "\n", - "# Generate a JSON-AI object\n", - "json_ai = json_ai_from_problem(df, problem_definition=pdef)" - ] - }, - { - "cell_type": "markdown", - "id": "swedish-riverside", - "metadata": {}, - "source": [ - "Let's take a look at our JSON-AI and print to file." 
- ] - }, - { - "cell_type": "code", - "execution_count": 4, - "id": "coastal-paragraph", - "metadata": {}, - "outputs": [ - { - "name": "stdout", - "output_type": "stream", - "text": [ - "{\n", - " \"features\": {\n", - " \"model\": {\n", - " \"encoder\": {\n", - " \"module\": \"Categorical.OneHotEncoder\",\n", - " \"args\": {}\n", - " },\n", - " \"data_dtype\": \"categorical\"\n", - " },\n", - " \"year\": {\n", - " \"encoder\": {\n", - " \"module\": \"Integer.NumericEncoder\",\n", - " \"args\": {}\n", - " },\n", - " \"data_dtype\": \"integer\"\n", - " },\n", - " \"transmission\": {\n", - " \"encoder\": {\n", - " \"module\": \"Categorical.OneHotEncoder\",\n", - " \"args\": {}\n", - " },\n", - " \"data_dtype\": \"categorical\"\n", - " },\n", - " \"mileage\": {\n", - " \"encoder\": {\n", - " \"module\": \"Integer.NumericEncoder\",\n", - " \"args\": {}\n", - " },\n", - " \"data_dtype\": \"integer\"\n", - " },\n", - " \"fuelType\": {\n", - " \"encoder\": {\n", - " \"module\": \"Categorical.OneHotEncoder\",\n", - " \"args\": {}\n", - " },\n", - " \"data_dtype\": \"categorical\"\n", - " },\n", - " \"tax\": {\n", - " \"encoder\": {\n", - " \"module\": \"Integer.NumericEncoder\",\n", - " \"args\": {}\n", - " },\n", - " \"data_dtype\": \"integer\"\n", - " },\n", - " \"mpg\": {\n", - " \"encoder\": {\n", - " \"module\": \"Float.NumericEncoder\",\n", - " \"args\": {}\n", - " },\n", - " \"data_dtype\": \"float\"\n", - " },\n", - " \"engineSize\": {\n", - " \"encoder\": {\n", - " \"module\": \"Float.NumericEncoder\",\n", - " \"args\": {}\n", - " },\n", - " \"data_dtype\": \"float\"\n", - " }\n", - " },\n", - " \"outputs\": {\n", - " \"price\": {\n", - " \"data_dtype\": \"integer\",\n", - " \"encoder\": {\n", - " \"module\": \"Integer.NumericEncoder\",\n", - " \"args\": {\n", - " \"is_target\": \"True\",\n", - " \"positive_domain\": \"$statistical_analysis.positive_domain\"\n", - " }\n", - " },\n", - " \"mixers\": [\n", - " {\n", - " \"module\": \"Neural\",\n", - " \"args\": {\n", - " \"fit_on_dev\": true,\n", - " \"stop_after\": \"$problem_definition.seconds_per_mixer\",\n", - " \"search_hyperparameters\": true\n", - " }\n", - " },\n", - " {\n", - " \"module\": \"LightGBM\",\n", - " \"args\": {\n", - " \"stop_after\": \"$problem_definition.seconds_per_mixer\",\n", - " \"fit_on_dev\": true\n", - " }\n", - " },\n", - " {\n", - " \"module\": \"Regression\",\n", - " \"args\": {\n", - " \"stop_after\": \"$problem_definition.seconds_per_mixer\"\n", - " }\n", - " }\n", - " ],\n", - " \"ensemble\": {\n", - " \"module\": \"BestOf\",\n", - " \"args\": {\n", - " \"args\": \"$pred_args\",\n", - " \"accuracy_functions\": \"$accuracy_functions\",\n", - " \"ts_analysis\": null\n", - " }\n", - " }\n", - " }\n", - " },\n", - " \"problem_definition\": {\n", - " \"target\": \"price\",\n", - " \"pct_invalid\": 2,\n", - " \"unbias_target\": true,\n", - " \"seconds_per_mixer\": 3011,\n", - " \"seconds_per_encoder\": 0,\n", - " \"time_aim\": 13552.040324918955,\n", - " \"target_weights\": null,\n", - " \"positive_domain\": false,\n", - " \"timeseries_settings\": {\n", - " \"is_timeseries\": false,\n", - " \"order_by\": null,\n", - " \"window\": null,\n", - " \"group_by\": null,\n", - " \"use_previous_target\": true,\n", - " \"nr_predictions\": null,\n", - " \"historical_columns\": null,\n", - " \"target_type\": \"\",\n", - " \"allow_incomplete_history\": false\n", - " },\n", - " \"anomaly_detection\": true,\n", - " \"ignore_features\": [],\n", - " \"fit_on_all\": true,\n", - " \"strict_mode\": true,\n", - " \"seed_nr\": 
420\n", - " },\n", - " \"identifiers\": {},\n", - " \"accuracy_functions\": [\n", - " \"r2_score\"\n", - " ]\n", - "}\n" - ] - } - ], - "source": [ - "print(json_ai.to_json())" - ] - }, - { - "cell_type": "markdown", - "id": "expired-flour", - "metadata": {}, - "source": [ - "### 3) Create your custom encoder (`LabelEncoder`).\n", - "\n", - "Once our JSON-AI is filled, let's make our LabelEncoder. All Lightwood encoders inherit from the `BaseEncoder` class, found [here](https://github.com/mindsdb/lightwood/blob/staging/lightwood/encoder/base.py). \n", - "\n", - "![BaseEncoder](baseencoder.png)\n", - "\n", - "\n", - "The `BaseEncoder` has 5 expected calls:\n", - "\n", - "- `__init__`: instantiate the encoder\n", - "- `prepare`: Train or create the rules of the encoder\n", - "- `encode`: Given data, convert to the featurized representation\n", - "- `decode`: Given featurized representations, revert back to data\n", - "- `to`: Use CPU/GPU (mostly important for learned representations)\n", - "\n", - "From above, we see that \"model\", \"transmission\", and \"fuelType\" are all categorical columns. These will be the ones we want to modify." - ] - }, - { - "cell_type": "markdown", - "id": "verbal-northwest", - "metadata": {}, - "source": [ - "##### `LabelEncoder`\n", - "\n", - "The `LabelEncoder` should satisfy a couple of rules\n", - "\n", - "(1) For the ``__init__`` call:
\n", - " - Specify the only argument `is_target`; this asks whether the encoder aims to represent the target column.
\n", - " - Set `is_prepared=False` in the initialization. All encoders are prepared using their `prepare()` call, which turns this flag on to `True` if preparation of the encoders is successful.
\n", - " - Set `output_size=1`; the output size refers to how many options the represented encoder may adopt. \n", - " \n", - " \n", - "(2) For the ``prepare`` call:\n", - " - Specify the only argument `priming_data`; this provides the `pd.Series` of the data column for the encoder.\n", - " - Find all unique categories in the column data\n", - " - Make a dictionary representing label number to category (reserves 0 as Unknown) and the inverse dictionary\n", - " - Set `is_prepared=True`\n", - " \n", - "(3) The `encode()` call will convert each data point's category name into the encoded label.\n", - "\n", - "(4) The `decode()` call will convert a previously encoded label into the original category name.\n", - "\n", - "Given this approach only uses simple dictionaries, there is no need for a dedicated `to()` call (although this would inherit `BaseEncoder`'s implementation).\n", - "\n", - "This implementation would look as follows:" - ] - }, - { - "cell_type": "markdown", - "id": "approximate-situation", - "metadata": {}, - "source": [ - "```python\n", - "\"\"\"\n", - "2021.10.13\n", - "\n", - "Create a LabelEncoder that transforms categorical data into a label.\n", - "\"\"\"\n", - "import pandas as pd\n", - "import torch\n", - "\n", - "from lightwood.encoder import BaseEncoder\n", - "from typing import List, Union\n", - "from lightwood.helpers.log import log\n", - "\n", - "\n", - "class LabelEncoder(BaseEncoder):\n", - " \"\"\"\n", - " Create a label representation for categorical data. The data will rely on sorted to organize the order of the labels.\n", - "\n", - " Class Attributes:\n", - " - is_target: Whether this is used to encode the target\n", - " - is_prepared: Whether the encoder rules have been set (after ``prepare`` is called)\n", - "\n", - " \"\"\" # noqa\n", - "\n", - " is_target: bool\n", - " is_prepared: bool\n", - "\n", - " is_timeseries_encoder: bool = False\n", - " is_trainable_encoder: bool = False\n", - "\n", - " def __init__(self, is_target: bool = False) -> None:\n", - " \"\"\"\n", - " Initialize the Label Encoder\n", - "\n", - " :param is_target:\n", - " \"\"\"\n", - " self.is_target = is_target\n", - " self.is_prepared = False\n", - "\n", - " # Size of the output encoded dimension per data point\n", - " # For LabelEncoder, this is always 1 (1 label per category)\n", - " self.output_size = 1\n", - "\n", - " # Not all encoders need to be prepared\n", - " def prepare(self, priming_data: pd.Series) -> None:\n", - " \"\"\"\n", - " Create a LabelEncoder for categorical data.\n", - "\n", - " LabelDict creates a mapping where each index is associated to a category.\n", - "\n", - " :param priming_data: Input column data that is categorical.\n", - "\n", - " :returns: Nothing; prepares encoder rules with `label_dict` and `ilabel_dict`\n", - " \"\"\"\n", - "\n", - " # Find all unique categories in the dataset\n", - " categories = priming_data.unique()\n", - "\n", - " log.info(\"Categories Detected = \" + str(self.output_size))\n", - "\n", - " # Create the Category labeller\n", - " self.label_dict = {\"Unknown\": 0} # Include an unknown category\n", - " self.label_dict.update({cat: idx + 1 for idx, cat in enumerate(categories)})\n", - " self.ilabel_dict = {idx: cat for cat, idx in self.label_dict.items()}\n", - "\n", - " self.is_prepared = True\n", - "\n", - " def encode(self, column_data: Union[pd.Series, list]) -> torch.Tensor:\n", - " \"\"\"\n", - " Convert pre-processed data into the labeled values\n", - "\n", - " :param column_data: Pandas series to convert into labels\n", - " 
\"\"\"\n", - " if isinstance(column_data, pd.Series):\n", - " enc = column_data.apply(lambda x: self.label_dict.get(x, 0)).tolist()\n", - " else:\n", - " enc = [self.label_dict.get(x, 0) for x in column_data]\n", - "\n", - " return torch.Tensor(enc).int().unsqueeze(1)\n", - "\n", - " def decode(self, encoded_data: torch.Tensor) -> List[object]:\n", - " \"\"\"\n", - " Convert torch.Tensor labels into categorical data\n", - "\n", - " :param encoded_data: Encoded data in the form of a torch.Tensor\n", - " \"\"\"\n", - " return [self.ilabel_dict[i.item()] for i in encoded_data]\n", - "```" - ] - }, - { - "cell_type": "markdown", - "id": "optical-archive", - "metadata": {}, - "source": [ - "Some additional notes:\n", - "(1) The `encode()` call should be able to intake a list of values, it is optional to make it compatible with `pd.Series` or `pd.DataFrame`
\n", - "(2) The output of `encode()` must be a torch tensor with dimensionality $N_{rows} x N_{output}$.\n", - "\n", - "Now that the `LabelEncoder` is complete, move this to `~/lightwood_modules` and we're ready to try this out!\n", - "\n", - "### 4) Edit JSON-AI\n", - "\n", - "Now that we have our `LabelEncoder` script, we have two ways of introducing this encoder:\n", - "\n", - "(1) Change all categorical columns to our encoder of choice
\n", - "(2) Replace the default encoder (`Categorical.OneHotEncoder`) for categorical data to our encoder of choice
\n", - "\n", - "In the first scenario, we may not want to change ALL columns. By switching the encoder on a `Feature` level, Lightwood allows you to control how representations for a given feature are handled. However, suppose you want to replace an approach entirely with your own methodology - Lightwood supports overriding default methods to control how you want to treat a *data type* as well.\n", - "\n", - "Below, we'll show both strategies:" - ] - }, - { - "cell_type": "markdown", - "id": "quiet-lodging", - "metadata": {}, - "source": [ - "The first strategy requires just specifying which features you'd like to change. Once you have your list, you can manually set the encoder \"module\" to the class you'd like. **This is best suited for a few columns or if you only want to override a few particular columns as opposed to replacing the `Encoder` behavior for an entire data type**.\n", - "#### Strategy 1: Change the encoders for the features directly\n", - "```python\n", - "for ft in [\"model\", \"transmission\", \"fuelType\"]: # Features you want to replace\n", - " # Set each feature to the custom encoder\n", - " json_ai.features[ft].encoder['module'] = 'LabelEncoder.LabelEncoder'\n", - "```\n", - "\n", - "\n", - "Suppose you have many columns that are categorical- you may want to enforce your approach explicitly without naming each column. This can be done by examining the `data_dtype` of JSON-AI's features. For all features that are type `categorical` (while this is a `str`, it's ideal to import dtype and explicitly check the data type), replace the default `Encoder` with your encoder. In this case, this is `LabelEncoder.LabelEncoder`.\n", - "#### Strategy 2: Programatically change *all* encoder assignments for a data type\n", - "\n", - "```python\n", - "from lightwood.api import dtype\n", - "for i in json_ai.features:\n", - " if json_ai.features[i].data_dtype == dtype.categorical:\n", - " json_ai.features[i].encoder['module'] = 'LabelEncoder.LabelEncoder'\n", - "```\n", - "\n", - "We'll go with the first approach for simplicity:" - ] - }, - { - "cell_type": "code", - "execution_count": 5, - "id": "elementary-fusion", - "metadata": {}, - "outputs": [], - "source": [ - "for ft in [\"model\", \"transmission\", \"fuelType\"]: # Features you want to replace\n", - " # Set each feature to the custom encoder\n", - " json_ai.features[ft].encoder['module'] = 'LabelEncoder.LabelEncoder'" - ] - }, - { - "cell_type": "markdown", - "id": "together-austria", - "metadata": {}, - "source": [ - "### 5) Generate code and your predictor from JSON-AI\n", - "\n", - "Now, let's use this JSON-AI object to generate code and make a predictor. This can be done in 2 simple lines, below:" - ] - }, - { - "cell_type": "code", - "execution_count": 6, - "id": "inappropriate-james", - "metadata": {}, - "outputs": [ - { - "name": "stderr", - "output_type": "stream", - "text": [ - "\u001b[32mINFO:lightwood-53258:Unable to import black formatter, predictor code might be a bit ugly.\u001b[0m\n" - ] - } - ], - "source": [ - "#Generate python code that fills in your pipeline\n", - "code = code_from_json_ai(json_ai)\n", - "\n", - "# Turn the code above into a predictor object\n", - "predictor = predictor_from_code(code)" - ] - }, - { - "cell_type": "markdown", - "id": "personalized-andorra", - "metadata": {}, - "source": [ - "Now, let's run our pipeline. 
To do so, let's first:\n", - "\n", - "(1) Perform a statistical analysis on the data (*this is important in preparing Encoders/Mixers as it populates the* `StatisticalAnalysis` *attribute with details some encoders need*).
\n", - "(2) Clean our data
\n", - "(3) Prepare the encoders
\n", - "(4) Featurize the data
" - ] - }, - { - "cell_type": "code", - "execution_count": 7, - "id": "palestinian-harvey", - "metadata": {}, - "outputs": [ - { - "name": "stderr", - "output_type": "stream", - "text": [ - "\u001b[32mINFO:lightwood-53258:Performing statistical analysis on data\u001b[0m\n", - "\u001b[32mINFO:lightwood-53258:Starting statistical analysis\u001b[0m\n", - "\u001b[32mINFO:lightwood-53258:Finished statistical analysis\u001b[0m\n", - "\u001b[32mINFO:lightwood-53258:Cleaning the data\u001b[0m\n", - "\u001b[32mINFO:lightwood-53258:Splitting the data into train/test\u001b[0m\n", - "\u001b[32mINFO:lightwood-53258:Preparing the encoders\u001b[0m\n", - "\u001b[32mINFO:lightwood-53258:Encoder prepping dict length of: 1\u001b[0m\n", - "\u001b[32mINFO:lightwood-53258:Encoder prepping dict length of: 2\u001b[0m\n", - "\u001b[32mINFO:lightwood-53258:Encoder prepping dict length of: 3\u001b[0m\n", - "\u001b[32mINFO:lightwood-53258:Encoder prepping dict length of: 4\u001b[0m\n", - "\u001b[32mINFO:lightwood-53258:Encoder prepping dict length of: 5\u001b[0m\n", - "\u001b[32mINFO:lightwood-53258:Encoder prepping dict length of: 6\u001b[0m\n", - "\u001b[32mINFO:lightwood-53258:Encoder prepping dict length of: 7\u001b[0m\n", - "\u001b[32mINFO:lightwood-53258:Encoder prepping dict length of: 8\u001b[0m\n", - "\u001b[32mINFO:lightwood-53258:Encoder prepping dict length of: 9\u001b[0m\n", - "\u001b[32mINFO:lightwood-53258:Categories Detected = 1\u001b[0m\n", - "\u001b[32mINFO:lightwood-53258:Categories Detected = 1\u001b[0m\n", - "\u001b[32mINFO:lightwood-53258:Categories Detected = 1\u001b[0m\n", - "\u001b[32mINFO:lightwood-53258:Done running for: price\u001b[0m\n", - "\u001b[32mINFO:lightwood-53258:Done running for: model\u001b[0m\n", - "\u001b[32mINFO:lightwood-53258:Done running for: year\u001b[0m\n", - "\u001b[32mINFO:lightwood-53258:Done running for: transmission\u001b[0m\n", - "\u001b[32mINFO:lightwood-53258:Done running for: mileage\u001b[0m\n", - "\u001b[32mINFO:lightwood-53258:Done running for: fuelType\u001b[0m\n", - "\u001b[32mINFO:lightwood-53258:Done running for: tax\u001b[0m\n", - "\u001b[32mINFO:lightwood-53258:Done running for: mpg\u001b[0m\n", - "\u001b[32mINFO:lightwood-53258:Done running for: engineSize\u001b[0m\n", - "\u001b[32mINFO:lightwood-53258:Featurizing the data\u001b[0m\n" - ] - } - ], - "source": [ - "# Perform Stats Analysis\n", - "predictor.analyze_data(df)\n", - "\n", - "# Pre-process the data\n", - "cleaned_data = predictor.preprocess(data=df)\n", - "\n", - "# Create a train/test split\n", - "split_data = predictor.split(cleaned_data)\n", - "\n", - "# Prepare the encoders \n", - "predictor.prepare(split_data)\n", - "\n", - "# Featurize the data\n", - "ft_data = predictor.featurize(split_data)" - ] - }, - { - "cell_type": "markdown", - "id": "ordered-beast", - "metadata": {}, - "source": [ - "The splitter creates 3 data-splits, a \"train\", \"dev\", and \"test\" set. The `featurize` command from the predictor allows us to convert the cleaned data into features. We can access this as follows:" - ] - }, - { - "cell_type": "code", - "execution_count": 8, - "id": "silent-dealing", - "metadata": {}, - "outputs": [ - { - "data": { - "text/html": [ - "
\n", - "\n", - "\n", - " \n", - " \n", - " \n", - " \n", - " \n", - " \n", - " \n", - " \n", - " \n", - " \n", - " \n", - " \n", - " \n", - " \n", - " \n", - " \n", - " \n", - " \n", - " \n", - " \n", - " \n", - " \n", - " \n", - " \n", - " \n", - " \n", - " \n", - " \n", - " \n", - " \n", - " \n", - " \n", - " \n", - " \n", - "
   fuelType  EncData
0    Diesel        1
1    Diesel        1
2    Diesel        1
3    Petrol        2
4    Diesel        1
\n", - "
" - ], - "text/plain": [ - " fuelType EncData\n", - "0 Diesel 1\n", - "1 Diesel 1\n", - "2 Diesel 1\n", - "3 Petrol 2\n", - "4 Diesel 1" - ] - }, - "execution_count": 8, - "metadata": {}, - "output_type": "execute_result" - } - ], - "source": [ - "# Pick a categorical column name\n", - "col_name = \"fuelType\"\n", - "\n", - "# Get the encoded feature data\n", - "enc_ft = ft_data[\"train\"].get_encoded_column_data(col_name).squeeze(1) #torch tensor (N_rows x N_output_dim)\n", - "\n", - "# Get the original data from the dataset\n", - "orig_data = ft_data[\"train\"].get_column_original_data(col_name) #pandas dataframe\n", - "\n", - "# Create a pandas data frame to compare encoded data and original data\n", - "compare_data = pd.concat([orig_data, pd.Series(enc_ft, name=\"EncData\")], axis=1)\n", - "compare_data.head()" - ] - }, - { - "cell_type": "markdown", - "id": "fatty-peoples", - "metadata": {}, - "source": [ - "We can see what the label mapping is by inspecting our encoders as follows:" - ] - }, - { - "cell_type": "code", - "execution_count": 9, - "id": "superior-mobility", - "metadata": {}, - "outputs": [ - { - "name": "stdout", - "output_type": "stream", - "text": [ - "{'Unknown': 0, 'Diesel': 1, 'Petrol': 2, 'Hybrid': 3}\n" - ] - } - ], - "source": [ - "# Label Name -> Label Number\n", - "print(predictor.encoders[col_name].label_dict)" - ] - }, - { - "cell_type": "markdown", - "id": "frequent-remedy", - "metadata": {}, - "source": [ - "For each category above, the number associated in the dictionary is the label for each category. This means \"Diesel\" is always represented by a 1, etc.\n", - "\n", - "With that, you've created your own custom Encoder that uses a rule-based approach! Please checkout more [tutorials](https://lightwood.io/tutorials.html) for other custom approach guides." - ] - } - ], - "metadata": { - "kernelspec": { - "display_name": "mdb", - "language": "python", - "name": "mdb" - }, - "language_info": { - "codemirror_mode": { - "name": "ipython", - "version": 3 - }, - "file_extension": ".py", - "mimetype": "text/x-python", - "name": "python", - "nbconvert_exporter": "python", - "pygments_lexer": "ipython3", - "version": "3.8.10" - } - }, - "nbformat": 4, - "nbformat_minor": 5 -} diff --git a/docs/_sources/tutorials/custom_explainer/custom_explainer.ipynb.txt b/docs/_sources/tutorials/custom_explainer/custom_explainer.ipynb.txt deleted file mode 100644 index 4f76349a5..000000000 --- a/docs/_sources/tutorials/custom_explainer/custom_explainer.ipynb.txt +++ /dev/null @@ -1,592 +0,0 @@ -{ - "cells": [ - { - "cell_type": "markdown", - "metadata": {}, - "source": [ - "# Tutorial - Implementing a custom analysis block in Lightwood\n", - "\n", - "\n", - "## Introduction\n", - "\n", - "As you might already know, Lightwood is designed to be a flexible machine learning (ML) library that is able to abstract and automate the entire ML pipeline. Crucially, it is also designed to be extended or modified very easily according to your needs, essentially offering the entire spectrum between fully automated AutoML and a lightweight wrapper for customized ML pipelines.\n", - "\n", - "As such, we can identify several different customizable \"phases\" in the process. The relevant phase for this tutorial is the \"analysis\" that comes after a predictor has been trained. The goal of this phase is to generate useful insights, like accuracy metrics, confusion matrices, feature importance, etc. 
These particular examples are all included in the core analysis procedure that Lightwood executes.\n", - "\n", - "However, the analysis procedure is structured into a sequential execution of \"analysis blocks\". Each analysis block should generate a well-defined set of insights, as well as handling any actions regarding these at inference time.\n", - "\n", - "As an example, one of the core blocks is the Inductive Conformal Prediction (`ICP`) block, which handles the confidence estimation of all Lightwood predictors. The logic within can be complex at times, but thanks to the block abstraction we can deal with it in a structured manner. As this `ICP` block is used when generating predictions, it implements the two main methods that the `BaseAnalysisBlock` class specifies: `.analyze()` to setup everything that is needed, and `.explain()` to actually estimate the confidence in any given prediction.\n", - "\n", - "\n", - "## Objective\n", - "\n", - "In this tutorial, we will go through the steps required to implement your own analysis blocks to customize the insights of any Lightwood predictor!\n", - "\n", - "In particular, we will implement a \"model correlation heatmap\" block: we want to compare the predictions of all mixers inside a `BestOf` ensemble object, to understand how they might differ in their overall behavior." - ] - }, - { - "cell_type": "code", - "execution_count": 1, - "metadata": {}, - "outputs": [ - { - "data": { - "text/plain": [ - "'1.3.0'" - ] - }, - "execution_count": 1, - "metadata": {}, - "output_type": "execute_result" - } - ], - "source": [ - "from typing import Dict, Tuple\n", - "import pandas as pd\n", - "import lightwood\n", - "lightwood.__version__" - ] - }, - { - "cell_type": "markdown", - "metadata": {}, - "source": [ - "## Step 1: figuring out what we need\n", - "\n", - "When designing an analysis block, an important choice needs to be made: will this block operate when calling the predictor? Or is it only going to describe its performance once in the held-out validation dataset?\n", - "\n", - "Being in the former case means we need to implement both `.analyze()` and `.explain()` methods, while the latter case only needs an `.analyze()` method. 
Our `ModelCorrelationHeatmap` belongs to this second category.\n", - "\n", - "Let's start the implementation by inheriting from `BaseAnalysisBlock`:" - ] - }, - { - "cell_type": "code", - "execution_count": 2, - "metadata": {}, - "outputs": [], - "source": [ - "from lightwood.analysis import BaseAnalysisBlock\n", - "\n", - "class ModelCorrelationHeatmap(BaseAnalysisBlock):\n", - " def __init__(self):\n", - " super().__init__()\n", - " \n", - " def analyze(self, info: Dict[str, object], **kwargs) -> Dict[str, object]:\n", - " return info\n", - "\n", - " def explain(self,\n", - " row_insights: pd.DataFrame,\n", - " global_insights: Dict[str, object], **kwargs) -> Tuple[pd.DataFrame, Dict[str, object]]:\n", - " \n", - " return row_insights, global_insights" - ] - }, - { - "cell_type": "code", - "execution_count": 3, - "metadata": {}, - "outputs": [ - { - "data": { - "text/plain": [ - "<__main__.ModelCorrelationHeatmap at 0x7fa85c015970>" - ] - }, - "execution_count": 3, - "metadata": {}, - "output_type": "execute_result" - } - ], - "source": [ - "ModelCorrelationHeatmap()" - ] - }, - { - "cell_type": "markdown", - "metadata": {}, - "source": [ - "Right now, our newly created analysis block doesn't do much, apart from returning the `info` and insights (`row_insights` and `global_insights`) exactly as it received them from the previous block.\n", - "\n", - "As previously discussed, we only need to implement a procedure that runs post-training, no action is required at inference time. This means we can use the default `.explain()` behavior in the parent class:" - ] - }, - { - "cell_type": "code", - "execution_count": 4, - "metadata": {}, - "outputs": [], - "source": [ - "class ModelCorrelationHeatmap(BaseAnalysisBlock):\n", - " def __init__(self):\n", - " super().__init__()\n", - " \n", - " def analyze(self, info: Dict[str, object], **kwargs) -> Dict[str, object]:\n", - " return info" - ] - }, - { - "cell_type": "markdown", - "metadata": {}, - "source": [ - "## Step 2: Implementing the custom analysis block\n", - "\n", - "Okay, now for the fun bit: we have to implement a correlation heatmap between the predictions of all mixers inside a `BestOf` ensemble. This is currently the only ensemble implemented in Lightwood, but it is a good idea to explicitly check that the type of the ensemble is what we expect.\n", - "\n", - "A natural question to ask at this point is: what information do we have to implement the procedure? You'll note that, apart from the `info` dictionary, we receive a `kwargs` dictionary. 
You can check out the full documentation for more details, but the keys (and respective value types) exposed in this object by default are:" - ] - }, - { - "cell_type": "code", - "execution_count": 5, - "metadata": {}, - "outputs": [], - "source": [ - "kwargs = {\n", - " 'predictor': 'lightwood.ensemble.BaseEnsemble',\n", - " 'target': 'str',\n", - " 'input_cols': 'list',\n", - " 'dtype_dict': 'dict',\n", - " 'normal_predictions': 'pd.DataFrame',\n", - " 'data': 'pd.DataFrame',\n", - " 'train_data': 'lightwood.data.encoded_ds.EncodedDs',\n", - " 'encoded_val_data': 'lightwood.data.encoded_ds.EncodedDs',\n", - " 'is_classification': 'bool',\n", - " 'is_numerical': 'bool',\n", - " 'is_multi_ts': 'bool',\n", - " 'stats_info': 'lightwood.api.types.StatisticalAnalysis',\n", - " 'ts_cfg': 'lightwood.api.types.TimeseriesSettings',\n", - " 'accuracy_functions': 'list',\n", - " 'has_pretrained_text_enc': 'bool'\n", - "}" - ] - }, - { - "cell_type": "markdown", - "metadata": {}, - "source": [ - "As you can see there is lots to work with, but for this example we will focus on using:\n", - "\n", - "1. The `predictor` ensemble\n", - "2. The `encoded_val_data` to generate predictions for each mixer inside the ensemble\n", - "\n", - "And the insight we're want to produce is a matrix that compares the output of all mixers and computes the correlation between them.\n", - "\n", - "Let's implement the algorithm:" - ] - }, - { - "cell_type": "code", - "execution_count": 6, - "metadata": {}, - "outputs": [], - "source": [ - "from typing import Dict\n", - "from types import SimpleNamespace\n", - "\n", - "import numpy as np\n", - "\n", - "from lightwood.ensemble import BestOf\n", - "from lightwood.analysis import BaseAnalysisBlock\n", - "\n", - "\n", - "class ModelCorrelationHeatmap(BaseAnalysisBlock):\n", - " def __init__(self):\n", - " super().__init__()\n", - " \n", - " def analyze(self, info: Dict[str, object], **kwargs) -> Dict[str, object]:\n", - " ns = SimpleNamespace(**kwargs)\n", - " \n", - " # only triggered with the right type of ensemble\n", - " if isinstance(ns.predictor, BestOf):\n", - " \n", - " # store prediction from every mixer\n", - " all_predictions = []\n", - "\n", - " for mixer in ns.predictor.mixers:\n", - " predictions = mixer(ns.encoded_val_data).values # retrieve np.ndarray from the returned pd.DataFrame\n", - " all_predictions.append(predictions.flatten().astype(int)) # flatten and cast labels to int\n", - " \n", - " # calculate correlation matrix\n", - " corrs = np.corrcoef(np.array(all_predictions))\n", - " \n", - " # save inside `info` object\n", - " info['mixer_correlation'] = corrs\n", - " \n", - " return info\n" - ] - }, - { - "cell_type": "markdown", - "metadata": {}, - "source": [ - "Notice the use of `SimpleNamespace` for dot notation accessors.\n", - "\n", - "The procedure above is fairly straightforward, as we leverage numpy's `corrcoef()` function to generate the matrix. \n", - "\n", - "Finally, it is very important to add the output to `info` so that it is saved inside the actual predictor object. " - ] - }, - { - "cell_type": "markdown", - "metadata": {}, - "source": [ - "## Step 3: Exposing the block to Lightwood\n", - "\n", - "\n", - "To use this in an arbitrary script, we need to add the above class (and all necessary imports) to a `.py` file inside one of the following directories:\n", - "\n", - "* `~/lightwood_modules` (where `~` is your home directory, e.g. 
`/Users/username/` for macOS and `/home/username/` for linux\n", - "* `/etc/lightwood_modules`\n", - "\n", - "Lightwood will scan these directories and import any class so that they can be found and used by the `JsonAI` code generating module.\n", - "\n", - "**To continue, please save the code cell above as `model_correlation.py` in one of the indicated directories.**" - ] - }, - { - "cell_type": "markdown", - "metadata": {}, - "source": [ - "## Step 4: Final test run\n", - "\n", - "Ok! Everything looks set to try out our custom block. Let's generate a predictor for [this](https://github.com/mindsdb/lightwood/blob/stable/tests/data/hdi.csv) sample dataset, and see whether our new insights are any good.\n", - "\n", - "First, it is important to add our `ModelCorrelationHeatmap` to the `analysis_blocks` attribute of the Json AI object that will generate your predictor code. " - ] - }, - { - "cell_type": "code", - "execution_count": 7, - "metadata": {}, - "outputs": [ - { - "name": "stderr", - "output_type": "stream", - "text": [ - "INFO:lightwood-53131:Dropping features: []\n", - "INFO:lightwood-53131:Analyzing a sample of 222\n", - "INFO:lightwood-53131:from a total population of 225, this is equivalent to 98.7% of your data.\n", - "INFO:lightwood-53131:Using 15 processes to deduct types.\n", - "INFO:lightwood-53131:Infering type for: Population\n", - "INFO:lightwood-53131:Infering type for: Area (sq. mi.)\n", - "INFO:lightwood-53131:Infering type for: Pop. Density \n", - "INFO:lightwood-53131:Infering type for: GDP ($ per capita)\n", - "INFO:lightwood-53131:Infering type for: Literacy (%)\n", - "INFO:lightwood-53131:Infering type for: Infant mortality \n", - "INFO:lightwood-53131:Infering type for: Development Index\n", - "INFO:lightwood-53131:Column Area (sq. mi.) has data type integer\n", - "INFO:lightwood-53131:Column Population has data type integer\n", - "INFO:lightwood-53131:Column Development Index has data type categorical\n", - "INFO:lightwood-53131:Column Literacy (%) has data type float\n", - "INFO:lightwood-53131:Column GDP ($ per capita) has data type integer\n", - "INFO:lightwood-53131:Column Infant mortality has data type float\n", - "INFO:lightwood-53131:Column Pop. 
Density has data type float\n", - "INFO:lightwood-53131:Starting statistical analysis\n", - "INFO:lightwood-53131:Finished statistical analysis\n" - ] - }, - { - "name": "stdout", - "output_type": "stream", - "text": [ - "model_correlation.py\n", - "model_correlation\n" - ] - } - ], - "source": [ - "from lightwood.api.high_level import ProblemDefinition, json_ai_from_problem\n", - "\n", - "# read dataset\n", - "df = pd.read_csv('https://raw.githubusercontent.com/mindsdb/lightwood/stable/tests/data/hdi.csv')\n", - "\n", - "# define the predictive task\n", - "pdef = ProblemDefinition.from_dict({\n", - " 'target': 'Development Index', # column you want to predict\n", - " 'time_aim': 100,\n", - "})\n", - "\n", - "# generate the Json AI intermediate representation from the data and its corresponding settings\n", - "json_ai = json_ai_from_problem(df, problem_definition=pdef)\n", - "\n", - "# add the custom list of analysis blocks; in this case, composed of a single block\n", - "json_ai.analysis_blocks = [{\n", - " 'module': 'model_correlation.ModelCorrelationHeatmap',\n", - " 'args': {}\n", - "}]" - ] - }, - { - "cell_type": "markdown", - "metadata": {}, - "source": [ - "We can take a look at the respective Json AI key just to confirm our newly added analysis block is in there:" - ] - }, - { - "cell_type": "code", - "execution_count": 8, - "metadata": {}, - "outputs": [ - { - "data": { - "text/plain": [ - "[{'module': 'model_correlation.ModelCorrelationHeatmap', 'args': {}}]" - ] - }, - "execution_count": 8, - "metadata": {}, - "output_type": "execute_result" - } - ], - "source": [ - "json_ai.analysis_blocks" - ] - }, - { - "cell_type": "markdown", - "metadata": {}, - "source": [ - "Now we are ready to create a predictor from this Json AI, and subsequently train it:" - ] - }, - { - "cell_type": "code", - "execution_count": 9, - "metadata": { - "scrolled": false - }, - "outputs": [ - { - "name": "stderr", - "output_type": "stream", - "text": [ - "INFO:lightwood-53131:Dropping features: []\n", - "INFO:lightwood-53131:Performing statistical analysis on data\n", - "INFO:lightwood-53131:Starting statistical analysis\n", - "INFO:lightwood-53131:Finished statistical analysis\n", - "INFO:lightwood-53131:Cleaning the data\n", - "INFO:lightwood-53131:Splitting the data into train/test\n", - "WARNING:lightwood-53131:Cannot stratify, got subsets of length: [25, 24, 23, 22, 22, 22, 22, 22, 22, 21] | Splitting without stratification\n", - "INFO:lightwood-53131:Preparing the encoders\n", - "INFO:lightwood-53131:Encoder prepping dict length of: 1\n", - "INFO:lightwood-53131:Encoder prepping dict length of: 2\n", - "INFO:lightwood-53131:Encoder prepping dict length of: 3\n", - "INFO:lightwood-53131:Encoder prepping dict length of: 4\n", - "INFO:lightwood-53131:Encoder prepping dict length of: 5\n", - "INFO:lightwood-53131:Encoder prepping dict length of: 6\n", - "INFO:lightwood-53131:Encoder prepping dict length of: 7\n" - ] - }, - { - "name": "stdout", - "output_type": "stream", - "text": [ - "model_correlation.py\n", - "model_correlation\n" - ] - }, - { - "name": "stderr", - "output_type": "stream", - "text": [ - "INFO:lightwood-53131:Done running for: Development Index\n", - "INFO:lightwood-53131:Done running for: Population\n", - "INFO:lightwood-53131:Done running for: Area (sq. mi.)\n", - "INFO:lightwood-53131:Done running for: Pop. 
Density \n", - "INFO:lightwood-53131:Done running for: GDP ($ per capita)\n", - "INFO:lightwood-53131:Done running for: Literacy (%)\n", - "INFO:lightwood-53131:Done running for: Infant mortality \n", - "INFO:lightwood-53131:Featurizing the data\n", - "INFO:lightwood-53131:Training the mixers\n", - "/home/natasha/mdb/lib/python3.8/site-packages/lightgbm/engine.py:151: UserWarning: Found `num_iterations` in params. Will use it instead of argument\n", - " warnings.warn(\"Found `{}` in params. Will use it instead of argument\".format(alias))\n", - "WARNING:lightwood-53131:LightGBM running on CPU, this somewhat slower than the GPU version, consider using a GPU instead\n", - "INFO:lightwood-53131:Loss of 2.1644320487976074 with learning rate 0.0001\n", - "INFO:lightwood-53131:Loss of 2.4373621940612793 with learning rate 0.00014\n", - "INFO:lightwood-53131:Found learning rate of: 0.0001\n", - "/home/natasha/mdb/lib/python3.8/site-packages/pytorch_ranger/ranger.py:172: UserWarning: This overload of addcmul_ is deprecated:\n", - "\taddcmul_(Number value, Tensor tensor1, Tensor tensor2)\n", - "Consider using one of the following signatures instead:\n", - "\taddcmul_(Tensor tensor1, Tensor tensor2, *, Number value) (Triggered internally at /pytorch/torch/csrc/utils/python_arg_parser.cpp:1005.)\n", - " exp_avg_sq.mul_(beta2).addcmul_(1 - beta2, grad, grad)\n", - "DEBUG:lightwood-53131:Loss @ epoch 1: 1.6043835878372192\n", - "DEBUG:lightwood-53131:Loss @ epoch 2: 1.614564061164856\n", - "DEBUG:lightwood-53131:Loss @ epoch 3: 1.6116881370544434\n", - "DEBUG:lightwood-53131:Loss @ epoch 4: 1.6085857152938843\n", - "DEBUG:lightwood-53131:Loss @ epoch 5: 1.5999916791915894\n", - "DEBUG:lightwood-53131:Loss @ epoch 6: 1.5959053039550781\n", - "DEBUG:lightwood-53131:Loss @ epoch 7: 1.5914497375488281\n", - "DEBUG:lightwood-53131:Loss @ epoch 8: 1.586897850036621\n", - "DEBUG:lightwood-53131:Loss @ epoch 9: 1.582642912864685\n", - "DEBUG:lightwood-53131:Loss @ epoch 10: 1.5786747932434082\n", - "DEBUG:lightwood-53131:Loss @ epoch 11: 1.5690934658050537\n", - "DEBUG:lightwood-53131:Loss @ epoch 12: 1.5649737119674683\n", - "DEBUG:lightwood-53131:Loss @ epoch 13: 1.5617222785949707\n", - "DEBUG:lightwood-53131:Loss @ epoch 14: 1.5580050945281982\n", - "DEBUG:lightwood-53131:Loss @ epoch 15: 1.55539071559906\n", - "DEBUG:lightwood-53131:Loss @ epoch 16: 1.5526844263076782\n", - "DEBUG:lightwood-53131:Loss @ epoch 17: 1.5471524000167847\n", - "DEBUG:lightwood-53131:Loss @ epoch 18: 1.5454663038253784\n", - "DEBUG:lightwood-53131:Loss @ epoch 19: 1.5436923503875732\n", - "DEBUG:lightwood-53131:Loss @ epoch 20: 1.5420359373092651\n", - "DEBUG:lightwood-53131:Loss @ epoch 21: 1.5407888889312744\n", - "DEBUG:lightwood-53131:Loss @ epoch 22: 1.5401763916015625\n", - "DEBUG:lightwood-53131:Loss @ epoch 23: 1.5390430688858032\n", - "DEBUG:lightwood-53131:Loss @ epoch 24: 1.53862726688385\n", - "DEBUG:lightwood-53131:Loss @ epoch 25: 1.5379230976104736\n", - "DEBUG:lightwood-53131:Loss @ epoch 26: 1.5374646186828613\n", - "DEBUG:lightwood-53131:Loss @ epoch 27: 1.5376394987106323\n", - "DEBUG:lightwood-53131:Loss @ epoch 28: 1.5372562408447266\n", - "DEBUG:lightwood-53131:Loss @ epoch 29: 1.537568211555481\n", - "DEBUG:lightwood-53131:Loss @ epoch 1: 1.5716121435165404\n", - "DEBUG:lightwood-53131:Loss @ epoch 2: 1.5647767543792725\n", - "DEBUG:lightwood-53131:Loss @ epoch 3: 1.5728715658187866\n", - "DEBUG:lightwood-53131:Loss @ epoch 4: 1.5768787622451783\n", - "DEBUG:lightwood-53131:Loss @ epoch 5: 
1.5729807138442993\n", - "DEBUG:lightwood-53131:Loss @ epoch 6: 1.56294903755188\n", - "DEBUG:lightwood-53131:Loss @ epoch 7: 1.5892131805419922\n", - "INFO:lightwood-53131:Started fitting LGBM model\n", - "/home/natasha/mdb/lib/python3.8/site-packages/lightgbm/engine.py:151: UserWarning: Found `num_iterations` in params. Will use it instead of argument\n", - " warnings.warn(\"Found `{}` in params. Will use it instead of argument\".format(alias))\n", - "INFO:lightwood-53131:A single GBM iteration takes 0.1 seconds\n", - "INFO:lightwood-53131:Training GBM () with 176 iterations given 22 seconds constraint\n", - "/home/natasha/mdb/lib/python3.8/site-packages/lightgbm/engine.py:156: UserWarning: Found `early_stopping_rounds` in params. Will use it instead of argument\n", - " warnings.warn(\"Found `{}` in params. Will use it instead of argument\".format(alias))\n", - "INFO:lightwood-53131:Lightgbm model contains 880 weak estimators\n", - "INFO:lightwood-53131:Updating lightgbm model with 10.5 iterations\n", - "/home/natasha/mdb/lib/python3.8/site-packages/lightgbm/engine.py:151: UserWarning: Found `num_iterations` in params. Will use it instead of argument\n", - " warnings.warn(\"Found `{}` in params. Will use it instead of argument\".format(alias))\n", - "/home/natasha/mdb/lib/python3.8/site-packages/lightgbm/engine.py:156: UserWarning: Found `early_stopping_rounds` in params. Will use it instead of argument\n", - " warnings.warn(\"Found `{}` in params. Will use it instead of argument\".format(alias))\n", - "INFO:lightwood-53131:Model now has a total of 880 weak estimators\n", - "WARNING:lightwood-53131:Exception: Unspported categorical type for regression when training mixer: \n", - "INFO:lightwood-53131:Ensembling the mixer\n", - "INFO:lightwood-53131:Mixer: Neural got accuracy: 0.2916666666666667\n", - "INFO:lightwood-53131:Mixer: LightGBM got accuracy: 1.0\n", - "INFO:lightwood-53131:Picked best mixer: LightGBM\n", - "INFO:lightwood-53131:Analyzing the ensemble of mixers\n", - "INFO:lightwood-53131:Adjustment on validation requested.\n", - "INFO:lightwood-53131:Updating the mixers\n", - "DEBUG:lightwood-53131:Loss @ epoch 1: 1.532525897026062\n", - "DEBUG:lightwood-53131:Loss @ epoch 2: 1.6230510274569194\n", - "DEBUG:lightwood-53131:Loss @ epoch 3: 1.529026726881663\n", - "DEBUG:lightwood-53131:Loss @ epoch 4: 1.4609563549359639\n", - "DEBUG:lightwood-53131:Loss @ epoch 5: 1.6120732029279072\n", - "INFO:lightwood-53131:Updating lightgbm model with 10.5 iterations\n", - "/home/natasha/mdb/lib/python3.8/site-packages/lightgbm/engine.py:151: UserWarning: Found `num_iterations` in params. Will use it instead of argument\n", - " warnings.warn(\"Found `{}` in params. Will use it instead of argument\".format(alias))\n", - "/home/natasha/mdb/lib/python3.8/site-packages/lightgbm/engine.py:156: UserWarning: Found `early_stopping_rounds` in params. Will use it instead of argument\n", - " warnings.warn(\"Found `{}` in params. 
Will use it instead of argument\".format(alias))\n", - "INFO:lightwood-53131:Model now has a total of 880 weak estimators\n" - ] - } - ], - "source": [ - "from lightwood.api.high_level import code_from_json_ai, predictor_from_code\n", - "\n", - "code = code_from_json_ai(json_ai)\n", - "predictor = predictor_from_code(code)\n", - "\n", - "predictor.learn(df)" - ] - }, - { - "cell_type": "markdown", - "metadata": {}, - "source": [ - "Finally, we can visualize the mixer correlation matrix:" - ] - }, - { - "cell_type": "code", - "execution_count": 10, - "metadata": {}, - "outputs": [ - { - "data": { - "image/png": "iVBORw0KGgoAAAANSUhEUgAAAR8AAAD4CAYAAADVYeLDAAAAOXRFWHRTb2Z0d2FyZQBNYXRwbG90bGliIHZlcnNpb24zLjQuMiwgaHR0cHM6Ly9tYXRwbG90bGliLm9yZy8rg+JYAAAACXBIWXMAAAsTAAALEwEAmpwYAAAR9ElEQVR4nO3bfXBV9Z3H8feHB0coRIqgYMgCasbRUgSfYH1aRx0V3QrVjsVhxmJVKmJx2qHVtlrBdjs+tCpailq0DFMpOutMDSxbHWqprIoCliiiGFpKIVanaCUiggjf/SOX9BIJD5KbbyCf10xm7jnnd3/3d0jyzrknQRGBmVlLa5e9ADNrmxwfM0vh+JhZCsfHzFI4PmaWokP2ArJ0lqJb9iJsr/ydQ7KXYHtt/bqI6LmzI202Pt2AMdmLsL0yiTOyl2B7bc7qpo74bZeZpXB8zCyF42NmKRwfM0vh+JhZCsfHzFI4PmaWwvExsxSOj5mlcHzMLIXjY2YpHB8zS+H4mFkKx8fMUjg+ZpbC8TGzFI6PmaVwfMwsheNjZikcHzNL4fiYWQrHx8xSOD5mlsLxMbMUjo+ZpXB8zCyF42NmKRwfM0vh+JhZCsfHzFI4PmaWwvExsxSOj5mlcHzMLIXjY2YpHB8zS+H4mFkKx8fMUjg+ZpbC8TGzFI6PmaVwfMwsheNjZikcHzNL4fiYWQrHx8xSOD5mlsLxMbMUjo+ZpXB89lPDH36Y77zzDte9+mqTY4ZNnsz4mhrGVlfTe/DgFlxd23b++SfwxhtTqal5kBtv/EqT4y655FQiZnPiiUcD0L17V5555r/44IPHuf/+b+ww9rLLTqe6+j6WLZvC7bd/raTrbykljY+kkPSzou0JkiaW8jULrzNf0kmlfp1MS6dP59cXXNDk8cphw+heWcl9lZXMHjOGi6ZObcHVtV3t2rVjypRrGTZsIscdN47LLz+TY4+t+NS4Ll06ccMNX2Lhwjca9m3a9DG33PIoEyY8ssPY7t27ctddX+ecc25mwIBx9Or1ec4+e2DJz6XUSn3lsxm4RFKP5pxU9dr0VdvqBQv46L33mjx+zPDhVM+YAcDaF1/k4G7d6NKrV0str8065ZRKVq78O6tWvcOWLZ8wa9azDB8+5FPjfvSjUdxxxxNs2rSlYd/GjZt57rnlO+wDOPLIXtTUvMW6dXUAzJtXzaWXnlbaE2kBpf4G/gR4CPhW4wOSekp6QtKiwsdphf0TJU0oGrdMUr/CxwpJM4BlQIWkqZIWS3pN0qQSn8t+pay8nLo1axq269aupay8PHFFbUN5+aGsWbOuYXvt2ncpLz90hzGDBx9FRUVP5s5dvEdzrlz5FsccU07fvofRvn07RowYSkVFs/48T9GhBV5jCvCKpDsb7Z8M3BMR/yfp34CngGN3M1cl8LWIWAgg6QcR8Z6k9sDvJQ2MiFeaerKkMcAYgEM+48mY7QtJ3H33VYwefe8eP+f99z9k7Nhf8Nhj32XbtuD551/nqKN6l26RLaTk8YmIusLVynjgo6JD5wLHSdq+XSapy26mW709PAWXFYLSAegNHAc0GZ+IeIj6KzGOkGKvTmQ/U1dbS1nFv+41lPXpQ11tbeKK2oba2nd3uCrp0+dQamvfbdju2rUTAwb0Zf78nwDQq9fnqaq6mYsv/jFLlqxsct45cxYxZ84iAK655ny2bt1WojNoOS113+Re4Crgc41ee2hEDCp8lEfEBurfqhWv6+Cixx9ufyCpPzABOCciBgL/02hsm7aiqorjr7gCgD5DhrB5/Xo2vP128qoOfIsW1VBZeQT9+h1Ox44dGDnyTKqqXmo4Xle3kZ49R9G//9X07381Cxeu2G14AHr2rL9W79btc1x33YVMm/Z0Sc+jJbTE2y4Kb40epz5A22/lPw18E7gLQNKgiFgK/BX4z8K+E4D+TUxbRn2M1ks6HBgGzC/NGbQ+l86cSb+zzqJzjx58e80a/nDrrbTv2BGAxQ8+SM3cuVReeCHjV65ky8aNPHnllckrbhu2bt3G9dc/wFNPTaJ9+3Y88sg8li//G5MmjWLx4hpmz35pl89ftWoaZWWdOeigDowYMZTzzvshr7++hsmTr+H44+u/FW67bRY1NW+1xOmUlCJK9+5D0oaI6FJ4fDiwCrgzIiYWfgM2hfr7PB2AZyPiWkmdgCeBcuBF4N+pDwvAnIgYUDT/dOBUYA2wHqiKiOmS5gMTIqLJO3pHSDGmWc/WSm1S/c8k26/MWRIRO/2zl5Je+WwPT+HxO0Dnou11wFd38pyPgPOamHJAo7Gjm3jds/Z+tWbWktr038qYWR7Hx8xSOD5mlsLxMbMUjo+ZpXB8zCyF42NmKRwfM0vh+JhZCsfHzFI4PmaWwvExsxSOj5mlcHzMLIXjY2YpHB8zS+H4mFkKx8fMUjg+ZpbC8TGzFI6PmaVwfMwsheNjZikcHzNL4fiYWQrHx8xSOD5mlsLxMbMUjo+ZpXB8zCyF42NmKRwfM0vh+JhZCsfHzFI4PmaWwvExsxSOj5mlcHzMLIXjY2YpHB8zS+H4mFkKx8fMUjg+ZpbC8TGzFI6PmaVwfMwsheNjZikcHzNLoYjIXkMKqVvAGdnLsL1wK3Oyl2B7aRIsiYiTdnbMVz5mlsLxMbMUjo+ZpXB8zCyF42NmKRwfM0vh+JhZCsfHzFI4PmaWwvExsxSOj5mlcHzMLIXjY2YpHB8zS+H4mFkKx8fMUjg+ZpbC8TGzFI6PmaVwfMwsheNjZikcHzNL4fiYWQrHx8xSOD5mlsLxMbMUjo+ZpXB8zCyF42NmKRwfM0vh+JhZCsfHzFI4PmaWwvExsxSOj5mlcHzMLIXjY2YpHB8zS+H4mFkKx8fMUjg+ZpbC8TGzFI6PmaVwfMwsheNjZikcHzNL4fiYWQrHx8xSOD5mlsLxaeXOP/8E3nhj
KjU1D3LjjV9pctwll5xKxGxOPPFoALp378ozz/wXH3zwOPff/40dxl522elUV9/HsmVTuP32r5V0/fYvwx9+mO+88w7Xvfpqk2OGTZ7M+JoaxlZX03vw4BZcXcvbbXwkbdjJvmslXbGb542W9PMmjn2/0fbhkmZK+oukJZJekPTlwrGzJK2XtFTSK5LmSTqs6DVC0rlFc40o7Gv6O3U/0a5dO6ZMuZZhwyZy3HHjuPzyMzn22IpPjevSpRM33PAlFi58o2Hfpk0fc8stjzJhwiM7jO3evSt33fV1zjnnZgYMGEevXp/n7LMHlvxcDJZOn86vL7igyeOVw4bRvbKS+yormT1mDBdNndqCq2t5n+nKJyIeiIgZ+/C6DfGRJOC3wLMRcWREnAiMBPoUjV8QEYMiYiCwCBhXdOzVwvjtLgeq92FtrcYpp1SycuXfWbXqHbZs+YRZs55l+PAhnxr3ox+N4o47nmDTpi0N+zZu3Mxzzy3fYR/AkUf2oqbmLdatqwNg3rxqLr30tNKeiAGwesECPnrvvSaPHzN8ONUz6r+t1r74Igd360aXXr1aankt7jPFR9JESRMKj08uXJEslXSXpGVFQ4+Q9DtJNZLuLIy/HehUGP8ocDbwcUQ8sP1JEbE6Iu7fyesK6Ar8s2j3AuAUSR0ldQGOBpZ+lvNqbcrLD2XNmnUN22vXvkt5+aE7jBk8+CgqKnoyd+7iPZpz5cq3OOaYcvr2PYz27dsxYsRQKip6NOu67bMpKy+nbs2ahu26tWspKy9PXFFpdWiGOX4FXBMRLxTCUmwQMBjYDKyQdH9E3CTp+ogYBCBpPPDybl7jDElLgUOBDym6cgICmAecDxwCVAH9dzaJpDHAmPqtTnt2dq2YJO6++ypGj753j5/z/vsfMnbsL3jsse+ybVvw/POvc9RRvUu3SLMm7NMNZ0ndgK4R8UJh18xGQ34fEesjYhOwHOi7B3NOkVQtaVHR7u1vuyqoj92djZ42i/q3XiOB3zQ1d0Q8FBEnRcRJcNDulpKutvbdHa5K+vQ5lNradxu2u3btxIABfZk//yesWjWNoUOPoarq5oabzk2ZM2cRQ4dO4NRTv8OKFbW8+WZtyc7B9lxdbS1lFf+6p1fWpw91tQfu56bUv+3aXPR4Kzu/0noNOGH7RkSMA84BejYxZxVwZvGOiHgJ+CLQIyLe3JcFtyaLFtVQWXkE/fodTseOHRg58kyqql5qOF5Xt5GePUfRv//V9O9/NQsXruDii3/MkiUrdzlvz56HANCt2+e47roLmTbt6ZKeh+2ZFVVVHH9F/e9x+gwZwub169nw9tvJqyqdfXrbFRHvS/pA0pCIeJEdb/zuyhZJHSNiC/AM8BNJYyNi++39zrt47unAn3ey/yZg0x4vfj+wdes2rr/+AZ56ahLt27fjkUfmsXz535g0aRSLF9cwe/ZLu3z+qlXTKCvrzEEHdWDEiKGcd94Pef31NUyefA3HH1//zvS222ZRU/NWS5xOm3fpzJn0O+ssOvfowbfXrOEPt95K+44dAVj84IPUzJ1L5YUXMn7lSrZs3MiTV16ZvOLSUkTseoC0DSj+6rwbKAM2RMRPJQ0BfglsA/4InBQRp0kaXXh8fWGeOcBPI2K+pDuAi4GXI2KUpN7APcAQ4B/U39d5ICIek3QW8CSwChCwHrg6It5s/BpFa54OzImI/276vLoFnLG7fx9rRW5lTvYSbC9NgiX1tzk+bbfx2R1JXSJiQ+HxTUDviLhhnyZtAY7P/sfx2f/sKj7N8duuiyR9rzDXamB0M8xpZge4fY5PRDwGPNYMazGzNsT/t8vMUjg+ZpbC8TGzFI6PmaVwfMwsheNjZikcHzNL4fiYWQrHx8xSOD5mlsLxMbMUjo+ZpXB8zCyF42NmKRwfM0vh+JhZCsfHzFI4PmaWwvExsxSOj5mlcHzMLIXjY2YpHB8zS+H4mFkKx8fMUjg+ZpbC8TGzFI6PmaVwfMwsheNjZikcHzNL4fiYWQrHx8xSOD5mlsLxMbMUjo+ZpXB8zCyF42NmKRwfM0vh+JhZCsfHzFI4PmaWwvExsxSOj5mlcHzMLIXjY2YpHB8zS6GIyF5DCkn/AFZnr6NEegDrshdhe+VA/Zz1jYieOzvQZuNzIJO0OCJOyl6H7bm2+Dnz2y4zS+H4mFkKx+fA9FD2AmyvtbnPme/5mFkKX/mYWQrHx8xSOD6tkKSQ9LOi7QmSJrbA686X1KZ+3bunJG3Yyb5rJV2xm+eNlvTzJo59v9H24ZJmSvqLpCWSXpD05cKxsyStl7RU0iuS5kk6rOg1QtK5RXONKOz7ymc535bg+LROm4FLJPVozklVz5/zZhIRD0TEjH2YoiE+kgT8Fng2Io6MiBOBkUCfovELImJQRAwEFgHjio69Whi/3eVA9T6sreT8hdg6fUL9bz++1fiApJ6SnpC0qPBxWmH/REkTisYtk9Sv8LFC0gxgGVAhaaqkxZJekzSppU7qQFP8by7p5MIVyVJJd0laVjT0CEm/k1Qj6c7C+NuBToXxjwJnAx9HxAPbnxQRqyPi/p28roCuwD+Ldi8ATpHUUVIX4GhgaTOfcrNyfFqvKcAoSYc02j8ZuCciTgYuBabtwVyVwC8i4gsRsRr4QeGvaQcC/yFpYHMuvI36FfCNiBgEbG10bBDwVeCLwFclVUTETcBHhSuZUcAXgJd38xpnSFoK/A04F3ik6FgA84DzgeFA1T6dTQtwfFqpiKgDZgDjGx06F/h54YuwCigr/KTbldURsbBo+zJJLwN/ov6L/rjmWXXbJKkb0DUiXijsmtloyO8jYn1EbAKWA333YM4pkqolLSravf1tVwX1sbuz0dNmUf/WayTwm89wKi2qQ/YCbJfupf6n4a+K9rUDhha+kBtI+oQdf5gcXPT4w6Jx/YEJwMkR8U9J0xuNtea3uejxVnb+ffca9VeyAETEuMI9v8VNzFkFPFG8IyJekvRFYGNEvFn/7qz18pVPKxYR7wGPA1cV7X4a+Ob2DUmDCg//CpxQ2HcC0L+Jacuoj9F6SYcDw5p10W1QRLwPfCBpSGHXyF0ML7ZFUsfC42eAgyWNLTreeRfPPR34807230TRjezWzFc+rd/PgOuLtscDUyS9Qv3n71ngWup/Cl4h6TXgReDNnU0WEdWS/gS8AawBnivh2g8knSWtLdq+u9Hxq4BfStoG/BFYvwdzPgS8IunliBglaQRwj6TvAv+g/ofEjUXjt9/zUWH+qxtPGBH/u4fnk87/vcKsGUjqEhEbCo9vAnpHxA3Jy2rVfOVj1jwukvQ96r+nVgOjc5fT+vnKx8xS+IazmaVwfMwsheNjZikcHzNL4fiYWYr/B6P8xHBYHfiOAAAAAElFTkSuQmCC\n", - "text/plain": [ - "
" - ] - }, - "metadata": { - "needs_background": "light" - }, - "output_type": "display_data" - } - ], - "source": [ - "import matplotlib.pyplot as plt\n", - "\n", - "mc = predictor.runtime_analyzer['mixer_correlation'] # newly produced insight\n", - "\n", - "mixer_names = [c.__class__.__name__ for c in predictor.ensemble.mixers]\n", - "\n", - "# plotting code\n", - "fig, ax = plt.subplots()\n", - "im = ax.imshow(mc, cmap='seismic')\n", - "\n", - "# set ticks\n", - "ax.set_xticks(np.arange(mc.shape[0]))\n", - "ax.set_yticks(np.arange(mc.shape[1]))\n", - "\n", - "# set tick labels\n", - "ax.set_xticklabels(mixer_names)\n", - "ax.set_yticklabels(mixer_names)\n", - "\n", - "# show cell values\n", - "for i in range(len(mixer_names)):\n", - " for j in range(len(mixer_names)):\n", - " text = ax.text(j, i, round(mc[i, j], 3), ha=\"center\", va=\"center\", color=\"w\")\n" - ] - }, - { - "cell_type": "markdown", - "metadata": {}, - "source": [ - "Nice! We've just added an additional piece of insight regarding the predictor that Lightwood came up with for the task of predicting the Human Development Index of any given country.\n", - "\n", - "What this matrix is telling us is whether the predictions of both mixers stored in the ensemble -- Neural and LightGBM -- have a high correlation or not.\n", - "\n", - "This is, of course, a very simple example, but it shows the convenience of such an abstraction within the broader pipeline that Lightwood automates.\n", - "\n", - "For more complex examples, you can check out any of the three core analysis blocks that we use:\n", - "\n", - "* `lightwood.analysis.nc.calibrate.ICP`\n", - "* `lightwood.analysis.helpers.acc_stats.AccStats`\n", - "* `lightwood.analysis.helpers.feature_importance.GlobalFeatureImportance`\n" - ] - }, - { - "cell_type": "code", - "execution_count": null, - "metadata": {}, - "outputs": [], - "source": [] - } - ], - "metadata": { - "kernelspec": { - "display_name": "mdb", - "language": "python", - "name": "mdb" - }, - "language_info": { - "codemirror_mode": { - "name": "ipython", - "version": 3 - }, - "file_extension": ".py", - "mimetype": "text/x-python", - "name": "python", - "nbconvert_exporter": "python", - "pygments_lexer": "ipython3", - "version": "3.8.10" - } - }, - "nbformat": 4, - "nbformat_minor": 4 -} diff --git a/docs/_sources/tutorials/custom_mixer/custom_mixer.ipynb.txt b/docs/_sources/tutorials/custom_mixer/custom_mixer.ipynb.txt deleted file mode 100644 index 0e5706b91..000000000 --- a/docs/_sources/tutorials/custom_mixer/custom_mixer.ipynb.txt +++ /dev/null @@ -1,530 +0,0 @@ -{ - "cells": [ - { - "cell_type": "markdown", - "metadata": {}, - "source": [ - "# Tutorial - Implementing a custom mixer in Lightwood\n", - "\n", - "\n", - "## Introduction\n", - "\n", - "Mixers are the center piece of lightwood, tasked with learning the mapping between the encoded feature and target representation\n", - "\n", - "\n", - "## Objective\n", - "\n", - "In this tutorial we'll be trying to implement a sklearn random forest as a mixer that handles categorical and binary targets. 
" - ] - }, - { - "cell_type": "markdown", - "metadata": {}, - "source": [ - "## Step 1: The Mixer Interface\n", - "\n", - "The Mixer interface is defined by the `BaseMixer` class, a mixer needs methods for 4 tasks:\n", - "* fitting (`fit`)\n", - "* predicting (`__call__`)\n", - "* construction (`__init__`)\n", - "* partial fitting (`partial_fit`), though this one is optional" - ] - }, - { - "cell_type": "markdown", - "metadata": {}, - "source": [ - "## Step 2: Writing our mixer\n", - "\n", - "I'm going to create a file called `random_forest_mixer.py` inside `/etc/lightwood_modules`, this is where lightwood sources custom modules from.\n", - "\n", - "Inside of it I'm going to write the following code:" - ] - }, - { - "cell_type": "code", - "execution_count": 1, - "metadata": {}, - "outputs": [], - "source": [ - "from lightwood.mixer import BaseMixer\n", - "from lightwood.api.types import PredictionArguments\n", - "from lightwood.data.encoded_ds import EncodedDs, ConcatedEncodedDs\n", - "from lightwood import dtype\n", - "from lightwood.encoder import BaseEncoder\n", - "\n", - "import torch\n", - "import pandas as pd\n", - "from sklearn.ensemble import RandomForestClassifier\n", - "\n", - "\n", - "class RandomForestMixer(BaseMixer):\n", - " clf: RandomForestClassifier\n", - "\n", - " def __init__(self, stop_after: int, dtype_dict: dict, target: str, target_encoder: BaseEncoder):\n", - " super().__init__(stop_after)\n", - " self.target_encoder = target_encoder\n", - " # Throw in case someone tries to use this for a problem that's not classification, I'd fail anyway, but this way the error message is more intuitive\n", - " if dtype_dict[target] not in (dtype.categorical, dtype.binary):\n", - " raise Exception(f'This mixer can only be used for classification problems! Got target dtype {dtype_dict[target]} instead!')\n", - "\n", - " # We could also initialize this in `fit` if some of the parameters depend on the input data, since `fit` is called exactly once\n", - " self.clf = RandomForestClassifier(max_depth=30)\n", - "\n", - " def fit(self, train_data: EncodedDs, dev_data: EncodedDs) -> None:\n", - " X, Y = [], []\n", - " # By default mixers get some train data and a bit of dev data on which to do early stopping or hyper parameter optimization. For this mixer, we don't need dev data, so we're going to concat the two in order to get more training data. Then, we're going to turn them into an sklearn friendly foramat.\n", - " for x, y in ConcatedEncodedDs([train_data, dev_data]):\n", - " X.append(x.tolist())\n", - " Y.append(y.tolist())\n", - " self.clf.fit(X, Y)\n", - "\n", - " def __call__(self, ds: EncodedDs,\n", - " args: PredictionArguments = PredictionArguments()) -> pd.DataFrame:\n", - " # Turn the data into an sklearn friendly format\n", - " X = []\n", - " for x, _ in ds:\n", - " X.append(x.tolist())\n", - "\n", - " Yh = self.clf.predict(X)\n", - "\n", - " # Lightwood encoders are meant to decode torch tensors, so we have to cast the predictions first\n", - " decoded_predictions = self.target_encoder.decode(torch.Tensor(Yh))\n", - "\n", - " # Finally, turn the decoded predictions into a dataframe with a single column called `prediction`. 
This is the standard behaviour all lightwood mixers use\n", - " ydf = pd.DataFrame({'prediction': decoded_predictions})\n", - "\n", - " return ydf\n", - "\n", - " \n", - " # We'll skip implementing `partial_fit`, thus making this mixer unsuitable for online training tasks" - ] - }, - { - "cell_type": "markdown", - "metadata": {}, - "source": [ - "## Step 3: Using our mixer\n", - "\n", - "We're going to use our mixer for diagnosing heart disease using this dataset: [https://github.com/mindsdb/benchmarks/blob/main/benchmarks/datasets/heart_disease/data.csv](https://github.com/mindsdb/benchmarks/blob/main/benchmarks/datasets/heart_disease/data.csv)\n", - "\n", - "First, since we don't want to bother writing a Json AI for this dataset from scratch, we're going to let lightwood auto generate one." - ] - }, - { - "cell_type": "code", - "execution_count": 2, - "metadata": {}, - "outputs": [ - { - "name": "stderr", - "output_type": "stream", - "text": [ - "INFO:lightwood-56096:Dropping features: []\n", - "INFO:lightwood-56096:Analyzing a sample of 298\n", - "INFO:lightwood-56096:from a total population of 303, this is equivalent to 98.3% of your data.\n", - "INFO:lightwood-56096:Using 15 processes to deduct types.\n", - "INFO:lightwood-56096:Infering type for: age\n", - "INFO:lightwood-56096:Infering type for: sex\n", - "INFO:lightwood-56096:Infering type for: cp\n", - "INFO:lightwood-56096:Infering type for: trestbps\n", - "INFO:lightwood-56096:Infering type for: fbs\n", - "INFO:lightwood-56096:Infering type for: chol\n", - "INFO:lightwood-56096:Infering type for: thalach\n", - "INFO:lightwood-56096:Infering type for: restecg\n", - "INFO:lightwood-56096:Infering type for: exang\n", - "INFO:lightwood-56096:Infering type for: ca\n", - "INFO:lightwood-56096:Infering type for: slope\n", - "INFO:lightwood-56096:Infering type for: thal\n", - "INFO:lightwood-56096:Column age has data type integer\n", - "INFO:lightwood-56096:Infering type for: target\n", - "INFO:lightwood-56096:Column sex has data type binary\n", - "INFO:lightwood-56096:Column fbs has data type binary\n", - "INFO:lightwood-56096:Column cp has data type categorical\n", - "INFO:lightwood-56096:Infering type for: oldpeak\n", - "INFO:lightwood-56096:Column trestbps has data type integer\n", - "INFO:lightwood-56096:Column chol has data type integer\n", - "INFO:lightwood-56096:Column thalach has data type integer\n", - "INFO:lightwood-56096:Column restecg has data type categorical\n", - "INFO:lightwood-56096:Column exang has data type binary\n", - "INFO:lightwood-56096:Column ca has data type categorical\n", - "INFO:lightwood-56096:Column slope has data type categorical\n", - "INFO:lightwood-56096:Column thal has data type categorical\n", - "INFO:lightwood-56096:Column target has data type binary\n", - "INFO:lightwood-56096:Column oldpeak has data type float\n", - "INFO:lightwood-56096:Starting statistical analysis\n", - "INFO:lightwood-56096:Finished statistical analysis\n" - ] - }, - { - "name": "stdout", - "output_type": "stream", - "text": [ - "random_forest_mixer.py\n", - "random_forest_mixer\n", - "{\n", - " \"features\": {\n", - " \"age\": {\n", - " \"encoder\": {\n", - " \"module\": \"Integer.NumericEncoder\",\n", - " \"args\": {}\n", - " }\n", - " },\n", - " \"sex\": {\n", - " \"encoder\": {\n", - " \"module\": \"Binary.BinaryEncoder\",\n", - " \"args\": {}\n", - " }\n", - " },\n", - " \"cp\": {\n", - " \"encoder\": {\n", - " \"module\": \"Categorical.OneHotEncoder\",\n", - " \"args\": {}\n", - " }\n", - " },\n", - " \"trestbps\": 
{\n", - " \"encoder\": {\n", - " \"module\": \"Integer.NumericEncoder\",\n", - " \"args\": {}\n", - " }\n", - " },\n", - " \"chol\": {\n", - " \"encoder\": {\n", - " \"module\": \"Integer.NumericEncoder\",\n", - " \"args\": {}\n", - " }\n", - " },\n", - " \"fbs\": {\n", - " \"encoder\": {\n", - " \"module\": \"Binary.BinaryEncoder\",\n", - " \"args\": {}\n", - " }\n", - " },\n", - " \"restecg\": {\n", - " \"encoder\": {\n", - " \"module\": \"Categorical.OneHotEncoder\",\n", - " \"args\": {}\n", - " }\n", - " },\n", - " \"thalach\": {\n", - " \"encoder\": {\n", - " \"module\": \"Integer.NumericEncoder\",\n", - " \"args\": {}\n", - " }\n", - " },\n", - " \"exang\": {\n", - " \"encoder\": {\n", - " \"module\": \"Binary.BinaryEncoder\",\n", - " \"args\": {}\n", - " }\n", - " },\n", - " \"oldpeak\": {\n", - " \"encoder\": {\n", - " \"module\": \"Float.NumericEncoder\",\n", - " \"args\": {}\n", - " }\n", - " },\n", - " \"slope\": {\n", - " \"encoder\": {\n", - " \"module\": \"Categorical.OneHotEncoder\",\n", - " \"args\": {}\n", - " }\n", - " },\n", - " \"ca\": {\n", - " \"encoder\": {\n", - " \"module\": \"Categorical.OneHotEncoder\",\n", - " \"args\": {}\n", - " }\n", - " },\n", - " \"thal\": {\n", - " \"encoder\": {\n", - " \"module\": \"Categorical.OneHotEncoder\",\n", - " \"args\": {}\n", - " }\n", - " }\n", - " },\n", - " \"outputs\": {\n", - " \"target\": {\n", - " \"data_dtype\": \"binary\",\n", - " \"encoder\": {\n", - " \"module\": \"Binary.BinaryEncoder\",\n", - " \"args\": {\n", - " \"is_target\": \"True\",\n", - " \"target_class_distribution\": \"$statistical_analysis.target_class_distribution\"\n", - " }\n", - " },\n", - " \"mixers\": [\n", - " {\n", - " \"module\": \"Neural\",\n", - " \"args\": {\n", - " \"fit_on_dev\": true,\n", - " \"stop_after\": \"$problem_definition.seconds_per_mixer\",\n", - " \"search_hyperparameters\": true\n", - " }\n", - " },\n", - " {\n", - " \"module\": \"LightGBM\",\n", - " \"args\": {\n", - " \"stop_after\": \"$problem_definition.seconds_per_mixer\",\n", - " \"fit_on_dev\": true\n", - " }\n", - " },\n", - " {\n", - " \"module\": \"Regression\",\n", - " \"args\": {\n", - " \"stop_after\": \"$problem_definition.seconds_per_mixer\"\n", - " }\n", - " }\n", - " ],\n", - " \"ensemble\": {\n", - " \"module\": \"BestOf\",\n", - " \"args\": {\n", - " \"args\": \"$pred_args\",\n", - " \"accuracy_functions\": \"$accuracy_functions\",\n", - " \"ts_analysis\": null\n", - " }\n", - " }\n", - " }\n", - " },\n", - " \"problem_definition\": {\n", - " \"target\": \"target\",\n", - " \"pct_invalid\": 2,\n", - " \"unbias_target\": true,\n", - " \"seconds_per_mixer\": 2364,\n", - " \"seconds_per_encoder\": 0,\n", - " \"time_aim\": 10642.1306731291,\n", - " \"target_weights\": null,\n", - " \"positive_domain\": false,\n", - " \"timeseries_settings\": {\n", - " \"is_timeseries\": false,\n", - " \"order_by\": null,\n", - " \"window\": null,\n", - " \"group_by\": null,\n", - " \"use_previous_target\": true,\n", - " \"nr_predictions\": null,\n", - " \"historical_columns\": null,\n", - " \"target_type\": \"\",\n", - " \"allow_incomplete_history\": false\n", - " },\n", - " \"anomaly_detection\": true,\n", - " \"ignore_features\": [],\n", - " \"fit_on_all\": true,\n", - " \"strict_mode\": true,\n", - " \"seed_nr\": 420\n", - " },\n", - " \"identifiers\": {},\n", - " \"accuracy_functions\": [\n", - " \"balanced_accuracy_score\"\n", - " ]\n", - "}\n" - ] - } - ], - "source": [ - "from lightwood.api.high_level import ProblemDefinition, json_ai_from_problem\n", - "import pandas as 
pd\n", - "\n", - "# read dataset\n", - "df = pd.read_csv('https://raw.githubusercontent.com/mindsdb/benchmarks/main/benchmarks/datasets/heart_disease/data.csv')\n", - "\n", - "# define the predictive task\n", - "pdef = ProblemDefinition.from_dict({\n", - " 'target': 'target', # column you want to predict\n", - "})\n", - "\n", - "# generate the Json AI intermediate representation from the data and its corresponding settings\n", - "json_ai = json_ai_from_problem(df, problem_definition=pdef)\n", - "\n", - "# Print it (you can also put it in a file and edit it there)\n", - "print(json_ai.to_json())" - ] - }, - { - "cell_type": "markdown", - "metadata": {}, - "source": [ - "Now we have to edit the `mixers` key of this json ai to tell lightwood to use our custom mixer. We can use it together with the others, and have it ensembled with them at the end, or standalone. In this case I'm going to replace all existing mixers with this one" - ] - }, - { - "cell_type": "code", - "execution_count": 3, - "metadata": {}, - "outputs": [], - "source": [ - "json_ai.outputs['target'].mixers = [{\n", - " 'module': 'random_forest_mixer.RandomForestMixer',\n", - " 'args': {\n", - " 'stop_after': '$problem_definition.seconds_per_mixer',\n", - " 'dtype_dict': '$dtype_dict',\n", - " 'target': '$target',\n", - " 'target_encoder': '$encoders[self.target]'\n", - "\n", - " }\n", - "}]" - ] - }, - { - "cell_type": "markdown", - "metadata": {}, - "source": [ - "Then we'll generate some code, and finally turn that code into a predictor object and fit it on the original data." - ] - }, - { - "cell_type": "code", - "execution_count": 4, - "metadata": {}, - "outputs": [ - { - "name": "stdout", - "output_type": "stream", - "text": [ - "random_forest_mixer.py\n", - "random_forest_mixer\n" - ] - } - ], - "source": [ - "from lightwood.api.high_level import code_from_json_ai, predictor_from_code\n", - "\n", - "code = code_from_json_ai(json_ai)\n", - "predictor = predictor_from_code(code)" - ] - }, - { - "cell_type": "code", - "execution_count": 5, - "metadata": {}, - "outputs": [ - { - "name": "stderr", - "output_type": "stream", - "text": [ - "INFO:lightwood-56096:Dropping features: []\n", - "INFO:lightwood-56096:Performing statistical analysis on data\n", - "INFO:lightwood-56096:Starting statistical analysis\n", - "INFO:lightwood-56096:Finished statistical analysis\n", - "INFO:lightwood-56096:Cleaning the data\n", - "INFO:lightwood-56096:Splitting the data into train/test\n", - "INFO:lightwood-56096:Preparing the encoders\n", - "INFO:lightwood-56096:Encoder prepping dict length of: 1\n", - "INFO:lightwood-56096:Encoder prepping dict length of: 2\n", - "INFO:lightwood-56096:Encoder prepping dict length of: 3\n", - "INFO:lightwood-56096:Encoder prepping dict length of: 4\n", - "INFO:lightwood-56096:Encoder prepping dict length of: 5\n", - "INFO:lightwood-56096:Encoder prepping dict length of: 6\n", - "INFO:lightwood-56096:Encoder prepping dict length of: 7\n", - "INFO:lightwood-56096:Encoder prepping dict length of: 8\n", - "INFO:lightwood-56096:Encoder prepping dict length of: 9\n", - "INFO:lightwood-56096:Encoder prepping dict length of: 10\n", - "INFO:lightwood-56096:Encoder prepping dict length of: 11\n", - "INFO:lightwood-56096:Encoder prepping dict length of: 12\n", - "INFO:lightwood-56096:Encoder prepping dict length of: 13\n", - "INFO:lightwood-56096:Encoder prepping dict length of: 14\n", - "INFO:lightwood-56096:Done running for: target\n", - "INFO:lightwood-56096:Done running for: age\n", - "INFO:lightwood-56096:Done 
running for: sex\n", - "INFO:lightwood-56096:Done running for: cp\n", - "INFO:lightwood-56096:Done running for: trestbps\n", - "INFO:lightwood-56096:Done running for: chol\n", - "INFO:lightwood-56096:Done running for: fbs\n", - "INFO:lightwood-56096:Done running for: restecg\n", - "INFO:lightwood-56096:Done running for: thalach\n", - "INFO:lightwood-56096:Done running for: exang\n", - "INFO:lightwood-56096:Done running for: oldpeak\n", - "INFO:lightwood-56096:Done running for: slope\n", - "INFO:lightwood-56096:Done running for: ca\n", - "INFO:lightwood-56096:Done running for: thal\n", - "INFO:lightwood-56096:Featurizing the data\n", - "INFO:lightwood-56096:Training the mixers\n", - "INFO:lightwood-56096:Ensembling the mixer\n", - "INFO:lightwood-56096:Mixer: RandomForestMixer got accuracy: 0.8149038461538461\n", - "INFO:lightwood-56096:Picked best mixer: RandomForestMixer\n", - "INFO:lightwood-56096:Analyzing the ensemble of mixers\n", - "INFO:lightwood-56096:Adjustment on validation requested.\n", - "INFO:lightwood-56096:Updating the mixers\n" - ] - } - ], - "source": [ - "predictor.learn(df)" - ] - }, - { - "cell_type": "markdown", - "metadata": {}, - "source": [ - "Finally, we can use the trained predictor to make some predictions, or save it to a pickle for later use" - ] - }, - { - "cell_type": "code", - "execution_count": 6, - "metadata": {}, - "outputs": [ - { - "name": "stderr", - "output_type": "stream", - "text": [ - "INFO:lightwood-56096:Dropping features: []\n", - "INFO:lightwood-56096:Cleaning the data\n", - "INFO:lightwood-56096:AccStats.explain() has not been implemented, no modifications will be done to the data insights.\n", - "INFO:lightwood-56096:GlobalFeatureImportance.explain() has not been implemented, no modifications will be done to the data insights.\n" - ] - }, - { - "name": "stdout", - "output_type": "stream", - "text": [ - " prediction truth confidence\n", - "0 0 None 0.95\n", - "1 0 None 0.94\n", - "2 1 None 0.97\n" - ] - } - ], - "source": [ - "predictions = predictor.predict(pd.DataFrame({\n", - " 'age': [63, 15, None],\n", - " 'sex': [1, 1, 0],\n", - " 'thal': [3, 1, 1]\n", - "}))\n", - "print(predictions)\n", - "\n", - "predictor.save('my_custom_heart_disease_predictor.pickle')" - ] - }, - { - "cell_type": "markdown", - "metadata": {}, - "source": [ - "That's it, all it takes to solve a predictive problem with lightwood using your own custom mixer." 
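If you want to pick the saved predictor back up later, something along these lines should work. This is only a sketch: it assumes `predictor_from_state` is the loader exposed by `lightwood.api.high_level` and that it accepts the pickle path plus the generated `code` string (check the API reference for your lightwood version), and it assumes `random_forest_mixer.py` is still present in your `lightwood_modules` directory so the custom mixer can be imported.

```python
import pandas as pd
# Assumed loader name/signature; verify it exists in your lightwood version
from lightwood.api.high_level import predictor_from_state

# Reload the state saved above; `code` is the predictor code generated earlier in this tutorial
loaded_predictor = predictor_from_state('my_custom_heart_disease_predictor.pickle', code)

# The reloaded predictor should behave like the original one
print(loaded_predictor.predict(pd.DataFrame({'age': [63], 'sex': [1], 'thal': [3]})))
```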
- ] - } - ], - "metadata": { - "interpreter": { - "hash": "31f2aee4e71d21fbe5cf8b01ff0e069b9275f58929596ceb00d14d90e3e16cd6" - }, - "kernelspec": { - "display_name": "Python 3", - "language": "python", - "name": "python3" - }, - "language_info": { - "codemirror_mode": { - "name": "ipython", - "version": 3 - }, - "file_extension": ".py", - "mimetype": "text/x-python", - "name": "python", - "nbconvert_exporter": "python", - "pygments_lexer": "ipython3", - "version": "3.8.10" - } - }, - "nbformat": 4, - "nbformat_minor": 4 -} diff --git a/docs/_sources/tutorials/custom_splitter/custom_splitter.ipynb.txt b/docs/_sources/tutorials/custom_splitter/custom_splitter.ipynb.txt deleted file mode 100644 index 174ed0695..000000000 --- a/docs/_sources/tutorials/custom_splitter/custom_splitter.ipynb.txt +++ /dev/null @@ -1,1387 +0,0 @@ -{ - "cells": [ - { - "cell_type": "markdown", - "id": "israeli-spyware", - "metadata": {}, - "source": [ - "## Build your own training/testing split\n", - "\n", - "#### Date: 2021.10.07\n", - "\n", - "When working with machine learning data, splitting into a \"train\", \"dev\" (or validation) and \"test\") set is important. Models use **train** data to learn representations and update their parameters; **dev** or validation data is reserved to see how the model may perform on unknown predictions. While it may not be explicitly trained on, it can be used as a stopping criteria, for hyper-parameter tuning, or as a simple sanity check. Lastly, **test** data is always reserved, hidden from the model, as a final pass to see what models perform best.\n", - "\n", - "Lightwood supports a variety of **encoders** (Feature engineering procedures) and **mixers** (predictor algorithms that go from feature vectors to the target). Given the diversity of algorithms, it is appropriate to split data into these three categories when *preparing* encoders or *fitting* mixers.\n", - "\n", - "Our default approach stratifies labeled data to ensure your train, validation, and test sets are equally represented in all classes. However, in many instances you may want a custom technique to build your own splits. We've included the `splitter` functionality (default found in `lightwood.data.splitter`) to enable you to build your own.\n", - "\n", - "In the following problem, we shall work with a Kaggle dataset around credit card fraud (found [here](https://www.kaggle.com/mlg-ulb/creditcardfraud)). Fraud detection is difficult because the events we are interested in catching are thankfully rare events. Because of that, there is a large **imbalance of classes** (in fact, in this dataset, less than 1% of the data are the rare-event).\n", - "\n", - "In a supervised technique, we may want to ensure our training data sees the rare event of interest. A random shuffle could potentially miss rare events. We will implement **SMOTE** to increase the number of positive classes in our training data.\n", - "\n", - "Let's get started!" 
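Before diving in, here is a minimal sketch of the oversampling idea on a toy frame. It only assumes `imblearn`'s `SMOTE` (version 0.5.0+, per the import comment in the next cell); the column names mirror the credit card data but the values are made up, and in this tutorial the resampling is meant to be applied to the training split only, inside the custom splitter built later.

```python
import pandas as pd
from imblearn.over_sampling import SMOTE  # imblearn >= 0.5

# Toy stand-in for the credit card frame: "Class" is the rare event we want to catch
toy = pd.DataFrame({
    "V1":     [0.1, 0.3, -1.2, 0.7, 0.2, -0.5, 1.1, 0.9, -0.3, 0.4],
    "Amount": [10.0, 5.2, 99.9, 3.1, 42.0, 7.7, 1.2, 250.0, 18.5, 60.3],
    "Class":  [0, 0, 0, 0, 0, 0, 0, 0, 1, 1],
})

X, y = toy.drop(columns=["Class"]), toy["Class"]

# k_neighbors must stay below the minority-class count (the default of 5 would fail on
# this tiny example; on the full dataset the default is fine). Only the train split
# should ever be oversampled - dev/test keep the natural class distribution.
X_res, y_res = SMOTE(k_neighbors=1, random_state=0).fit_resample(X, y)

print(y.value_counts())      # 8 negatives vs 2 positives
print(y_res.value_counts())  # balanced after SMOTE
```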
- ] - }, - { - "cell_type": "code", - "execution_count": 1, - "id": "interim-discussion", - "metadata": {}, - "outputs": [], - "source": [ - "import numpy as np\n", - "import pandas as pd\n", - "import torch\n", - "import nltk\n", - "import matplotlib.pyplot as plt\n", - "\n", - "import os\n", - "import sys\n", - "\n", - "# Lightwood modules\n", - "import lightwood as lw\n", - "from lightwood import ProblemDefinition, \\\n", - " JsonAI, \\\n", - " json_ai_from_problem, \\\n", - " code_from_json_ai, \\\n", - " predictor_from_code\n", - "\n", - "import imblearn # Vers 0.5.0 minimum requirement" - ] - }, - { - "cell_type": "markdown", - "id": "decimal-techno", - "metadata": {}, - "source": [ - "### 1) Load your data\n", - "\n", - "Lightwood works with `pandas` DataFrames. We can use pandas to load our data. Please download the dataset from the above link and place it in a folder called `data/` where this notebook is located." - ] - }, - { - "cell_type": "code", - "execution_count": 2, - "id": "foreign-orchestra", - "metadata": {}, - "outputs": [ - { - "data": { - "text/html": [ - "
\n", - "\n", - "\n", - " \n", - " \n", - " \n", - " \n", - " \n", - " \n", - " \n", - " \n", - " \n", - " \n", - " \n", - " \n", - " \n", - " \n", - " \n", - " \n", - " \n", - " \n", - " \n", - " \n", - " \n", - " \n", - " \n", - " \n", - " \n", - " \n", - " \n", - " \n", - " \n", - " \n", - " \n", - " \n", - " \n", - " \n", - " \n", - " \n", - " \n", - " \n", - " \n", - " \n", - " \n", - " \n", - " \n", - " \n", - " \n", - " \n", - " \n", - " \n", - " \n", - " \n", - " \n", - " \n", - " \n", - " \n", - " \n", - " \n", - " \n", - " \n", - " \n", - " \n", - " \n", - " \n", - " \n", - " \n", - " \n", - " \n", - " \n", - " \n", - " \n", - " \n", - " \n", - " \n", - " \n", - " \n", - " \n", - " \n", - " \n", - " \n", - " \n", - " \n", - " \n", - " \n", - " \n", - " \n", - " \n", - " \n", - " \n", - " \n", - " \n", - " \n", - " \n", - " \n", - " \n", - " \n", - " \n", - " \n", - " \n", - " \n", - " \n", - " \n", - " \n", - " \n", - " \n", - " \n", - " \n", - " \n", - " \n", - " \n", - " \n", - " \n", - " \n", - " \n", - " \n", - " \n", - " \n", - " \n", - " \n", - " \n", - " \n", - " \n", - " \n", - " \n", - " \n", - " \n", - " \n", - " \n", - " \n", - " \n", - " \n", - " \n", - " \n", - " \n", - " \n", - " \n", - " \n", - " \n", - " \n", - " \n", - " \n", - " \n", - " \n", - " \n", - " \n", - " \n", - " \n", - " \n", - " \n", - " \n", - "
   Time        V1        V2        V3        V4        V5        V6        V7        V8        V9  ...       V21       V22       V23       V24       V25       V26       V27       V28  Amount  Class
0   0.0 -1.359807 -0.072781  2.536347  1.378155 -0.338321  0.462388  0.239599  0.098698  0.363787  ... -0.018307  0.277838 -0.110474  0.066928  0.128539 -0.189115  0.133558 -0.021053  149.62      0
1   0.0  1.191857  0.266151  0.166480  0.448154  0.060018 -0.082361 -0.078803  0.085102 -0.255425  ... -0.225775 -0.638672  0.101288 -0.339846  0.167170  0.125895 -0.008983  0.014724    2.69      0
2   1.0 -1.358354 -1.340163  1.773209  0.379780 -0.503198  1.800499  0.791461  0.247676 -1.514654  ...  0.247998  0.771679  0.909412 -0.689281 -0.327642 -0.139097 -0.055353 -0.059752  378.66      0
3   1.0 -0.966272 -0.185226  1.792993 -0.863291 -0.010309  1.247203  0.237609  0.377436 -1.387024  ... -0.108300  0.005274 -0.190321 -1.175575  0.647376 -0.221929  0.062723  0.061458  123.50      0
4   2.0 -1.158233  0.877737  1.548718  0.403034 -0.407193  0.095921  0.592941 -0.270533  0.817739  ... -0.009431  0.798278 -0.137458  0.141267 -0.206010  0.502292  0.219422  0.215153   69.99      0
\n", - "

5 rows × 31 columns

\n", - "
" - ], - "text/plain": [ - " Time V1 V2 V3 V4 V5 V6 V7 \\\n", - "0 0.0 -1.359807 -0.072781 2.536347 1.378155 -0.338321 0.462388 0.239599 \n", - "1 0.0 1.191857 0.266151 0.166480 0.448154 0.060018 -0.082361 -0.078803 \n", - "2 1.0 -1.358354 -1.340163 1.773209 0.379780 -0.503198 1.800499 0.791461 \n", - "3 1.0 -0.966272 -0.185226 1.792993 -0.863291 -0.010309 1.247203 0.237609 \n", - "4 2.0 -1.158233 0.877737 1.548718 0.403034 -0.407193 0.095921 0.592941 \n", - "\n", - " V8 V9 ... V21 V22 V23 V24 V25 \\\n", - "0 0.098698 0.363787 ... -0.018307 0.277838 -0.110474 0.066928 0.128539 \n", - "1 0.085102 -0.255425 ... -0.225775 -0.638672 0.101288 -0.339846 0.167170 \n", - "2 0.247676 -1.514654 ... 0.247998 0.771679 0.909412 -0.689281 -0.327642 \n", - "3 0.377436 -1.387024 ... -0.108300 0.005274 -0.190321 -1.175575 0.647376 \n", - "4 -0.270533 0.817739 ... -0.009431 0.798278 -0.137458 0.141267 -0.206010 \n", - "\n", - " V26 V27 V28 Amount Class \n", - "0 -0.189115 0.133558 -0.021053 149.62 0 \n", - "1 0.125895 -0.008983 0.014724 2.69 0 \n", - "2 -0.139097 -0.055353 -0.059752 378.66 0 \n", - "3 -0.221929 0.062723 0.061458 123.50 0 \n", - "4 0.502292 0.219422 0.215153 69.99 0 \n", - "\n", - "[5 rows x 31 columns]" - ] - }, - "execution_count": 2, - "metadata": {}, - "output_type": "execute_result" - } - ], - "source": [ - "# Load the data\n", - "ddir = \"data/\"\n", - "filename = os.path.join(ddir, \"creditcard.csv.zip\")\n", - "\n", - "data = pd.read_csv(filename)\n", - "data.head()" - ] - }, - { - "cell_type": "markdown", - "id": "rental-contribution", - "metadata": {}, - "source": [ - "We see **31 columns**, most of these columns appear numerical. Due to confidentiality reasons, the Kaggle dataset mentions that the columns labeled $V_i$ indicate principle components (PCs) from a PCA analysis of the original data from the credit card company. There is also a \"Time\" and \"Amount\", two original features that remained. The time references time after the first transaction in the dataset, and amount is how much money was considered in the transaction. 
\n", - "\n", - "You can also see a heavy imbalance in the two classes below:" - ] - }, - { - "cell_type": "code", - "execution_count": 3, - "id": "cathedral-mills", - "metadata": {}, - "outputs": [ - { - "data": { - "text/plain": [ - "Text(0.5, 1.0, 'Distribution of Classes')" - ] - }, - "execution_count": 3, - "metadata": {}, - "output_type": "execute_result" - }, - { - "data": { - "image/png": "iVBORw0KGgoAAAANSUhEUgAAAYUAAAEWCAYAAACJ0YulAAAAOXRFWHRTb2Z0d2FyZQBNYXRwbG90bGliIHZlcnNpb24zLjQuMiwgaHR0cHM6Ly9tYXRwbG90bGliLm9yZy8rg+JYAAAACXBIWXMAAAsTAAALEwEAmpwYAAAUQUlEQVR4nO3dfbRddX3n8feHEGDxkOvS2AeQECwPmlp19Irj1FpabU2QlJauRcEHsEVTnKEdHwe0dJZO69SZWVpbpdVUaawuQ5naqaREadXRMDNMJbCkgJROTAMElfBkEhhUAt/54+zsOb3em5wb7r47J3m/1jqL7N/e+3e+94acz/n9fvvsk6pCkiSAQ/ouQJK0/zAUJEktQ0GS1DIUJEktQ0GS1DIUJEktQ0HzLslHkvz2HPW1JMnDSRY0219O8oa56Lvp73NJLpir/mbxvL+b5P4k397H87ckecVc16UDn6GgOdW8GD2aZGeS7yT5X0kuStL+v1ZVF1XV74zY1x5f2Krqrqo6uqoen4Pa353kU1P6X1FVn3iyfc+yjiXA24BlVfUjMxyzKMkHk9zVhOI3mu3F81mrDjyGgrqwsqqOAU4A3gdcAnx8rp8kyaFz3ed+YgnwQFVtm25nksOALwI/DiwHFgEvAR4ATpuvInVgMhTUmaraXlVXA78CXJDkOQBJ1iT53ebPi5P8dTOqeDDJdUkOSfJJBi+O65p3wv8uydIkleTCJHcBXxpqGw6IH0vy1SQ7knw2yVOb5zo9ydbhGnePRpIsB94F/ErzfDc3+9vpqKauy5LcmWRbkj9LMtHs213HBc279/uT/NZMv5skE8359zX9Xdb0/wrgb4FjmzrWTHP6+c3v5peq6utV9URVbauq36mq9dM812lJrm9+x99K8uEmWMjA7zc/z44ktwz9PZ2R5OvNqO+eJG8f6vPMJF8bGg0+d2jfJc3xO5PckeTlM/0etP8xFNS5qvoqsBX4qWl2v63Z93Tghxm8MFdVvQ64i8Go4+iq+s9D5/w08GzglTM85fnArwE/CuwC/nCEGj8P/Efgz5vne940h72+efwM8EzgaODDU455KXAq8HLg3yd59gxP+SFgounnp5uaf7WqvgCsAL7Z1PH6ac59BfD5qnp4bz9X43HgLcBiBiOKlwP/utn388DLgFOaes5hMOKAweju15tR33OALwEk+RfAFcCvA08DPgpcneTwJKcCFwMvas57JbBlxDq1HzAUNF++CTx1mvbHGLx4n1BVj1XVdbX3G3K9u6oeqapHZ9j/yaq6taoeAX4bOGf3QvST9BrgA1W1uXlBfidw7pRRynuq6tGquhm4GfiBcGlqORd4Z1XtrKotwPuB141Yx9OAb41adFXdWFX/u6p2Nc/1UQZBBIPf/zHAs4BU1e1V9a2hfcuSLKqqh6rqpqZ9FfDRqvq7qnq8WXP5HvAvGQTQ4c15C6tqS1V9Y9Ra1T9DQfPlOODBadr/C7AJ+Jskm5NcOkJfd89i/53AQgbvkp+sY5v+hvs+lMEIZ7fhq4X+L4PRxFSLm5qm9nXciHU8wCBIR5LklGaK7ttJdjAYES0GqKovMRjtXA5sS7I6yaLm1F8GzgDuTPKVJC9p2k8A3tZMHX0nyXeA44Fjq2oT8Gbg3U1/VyY5dtRa1T9DQZ1L8iIGL3j/Y+q+5p3y26rqmcAvAG8dmoOeacSwt5HE8UN/XsLgHe/9wCPAkUN1LWAwbTVqv99k8II43Pcu4N69nDfV/U1NU/u6Z8TzvwC8MslRIx7/x8A/ACdX1SIGU3TZvbOq/rCqXggsYzCN9I6m/YaqOgv4IeCvgKuaU+4G3ltVTxl6HFlVa5vzPl1VL21+vgL+04h1aj9gKKgzzWWTZwJXAp+qqlumOebMJCclCbCdwfTDE83uexnMuc/Wa5MsS3Ik8B+Av2guWf1H4Igkr0qyELiMwVTHbvcCSzN0+ewUa4G3JDkxydH8/zWIXbMprqnlKuC9SY5JcgLwVuBTez6z9UkGL8yfSfKsZoH6aUneleSMaY4/BtgBPJzkWcCbdu9I8qIkL25+H48A3wWeSHJYktckmaiqx5rzd/+9/AlwUXNekhzV/E6PSXJqkp9NcnjT16ND52kMGArqwrokOxm8cP0W8AHgV2c49mQG73wfBq4H/qiq/nuz7/eAy5opirfPcP50PgmsYTCVcwTwmzC4GorBAuvHGLwrf4TBIvdu/7X57wNJbuIHXdH0vQH4JwYver8xi7qG/Ubz/JsZjKA+3fS/V1X1PQaLzf/A4EqlHcBXGUwJ/d00p7wdeDWwk8EL+p8P7VvUtD3EYArrAQZTejBY49jSTDldxGBNharaCLyRwbTTQwym/17fnHM4g8uQ72fw+/8hBmsvGhPxS3YkSbs5UpAktQwFSVLLUJAktQwFSVJrrG8otnjx4lq6dGnfZUjSWLnxxhvvr6qnT7dvrENh6dKlbNy4se8yJGmsJLlzpn1OH0mSWoaCJKllKEiSWoaCJKllKEiSWoaCJKllKEiSWmMZCklWJlm9ffv2vkuRpAPKWH54rarWAesmJyffuK99LL30mjmsqF9b3veqvkuQdIAYy5GCJKkbhoIkqWUoSJJahoIkqWUoSJJahoIkqWUoSJJahoIkqWUoSJJahoIkqTWWoeC9jySpG2MZClW1rqpWTUxM9F2KJB1QxjIUJEndMBQkSS1DQZLUMhQkSS1DQZLUMhQkSS1DQZLUMhQkSS1DQZLUMhQkSS1DQZLUMhQkSS1DQZLUMhQkSS1DQZLUMhQkSa39JhSSnJ7kuiQfSXJ63/VI0sGo01BIckWSbUlundK+PMkdSTYlubRpLuBh4Ahga5d1SZKm1/VIYQ2wfLghyQLgcmAFsAw4L8ky4LqqWgFcAryn47okSdPoNBSqagPw4JTm04BNVbW5qr4PXAmcVVVPNPsfAg7vsi5J0vQO7eE5jwPuHtreCrw4ydnAK4GnAB+e6eQkq4BVAEuWLOmuSkk6CPURCtOqqr8E/nKE41YDqwEmJyer67ok6WDSx9VH9wDHD20/o2mTJPWsj1C4ATg5yYlJDgPOBa6eTQdJViZZvX379k4KlKSDVdeXpK4FrgdOTbI1yYVVtQu4GLgWuB24qqpum02/VbWuqlZNTEzMfdGSdBD
rdE2hqs6boX09sL7L55Ykzd5+84lmSVL/xjIUXFOQpG6MZSi4piBJ3RjLUJAkdcNQkCS1xjIUXFOQpG6MZSi4piBJ3RjLUJAkdcNQkCS1DAVJUmssQ8GFZknqxliGggvNktSNsQwFSVI3DAVJUstQkCS1xjIUXGiWpG6MZSi40CxJ3RjLUJAkdcNQkCS1DAVJUstQkCS1DAVJUmssQ8FLUiWpG2MZCl6SKkndGMtQkCR1w1CQJLUMBUlSy1CQJLUMBUlSy1CQJLUMBUlSayxDwQ+vSVI3xjIU/PCaJHVjLENBktSNWYVCkkOSLOqqGElSv/YaCkk+nWRRkqOAW4GvJ3lH96VJkubbKCOFZVW1A/hF4HPAicDruixKktSPUUJhYZKFDELh6qp6rNuSJEl9GSUUPgpsAY4CNiQ5AfBaUEk6AI0SCuuq6riqOqOqCrgL+LWO65Ik9WCUUPjM8EYTDFd2U44kqU+HzrQjybOAHwcmkpw9tGsRcETXhUmS5t+MoQCcCpwJPAVYOdS+E3hjhzVJknoyYyhU1WeBzyZ5SVVdP481SZJ6sqeRwm6bkrwLWDp8fFX1tticZCWw8qSTTuqrBEk6II2y0PxZYAL4AnDN0KM33hBPkroxykjhyKq6pPNKJEm9G2Wk8NdJzui8EklS70YJhX/LIBgeTbIjyc4kO7ouTJI0//Y6fVRVx8xHIZKk/u01FJK8bLr2qtow9+VIkvo0ykLz8HcnHAGcBtwI/GwnFUmSejPK9NHwp5lJcjzwwa4KkiT1Z1++o3kr8Oy5LkSS1L9R1hQ+BFSzeQjwfOCmDmuSJPVklDWFjUN/3gWsrar/2VE9kqQejbKm8IkkhwGnNE13dFuSJKkvo0wfnQ58gsFXcgY4PskFXpIqSQeeUaaP3g/8fFXdAZDkFGAt8MIuC5Mkzb9Rrj5auDsQAKrqH4GF3ZUkSerLSAvNST4GfKrZfi3/fPFZknSAGCUU3gT8G+A3m+0NwB93VpEkqTczTh8leXqSZVX1var6QFWdXVVnA38LLOqimCRHJdmY5Mwu+pck7dme1hQ+BCyepv2pwB+M0nmSK5JsS3LrlPblSe5IsinJpUO7LgGuGqVvSdLc21MonDTdZadVdR3w3BH7XwMsH25IsgC4HFgBLAPOS7Isyc8BXwe2jdi3JGmO7WlNYU/fozDS1UdVtSHJ0inNpwGbqmozQJIrgbOAo4GjGATFo0nWV9UTU/tMsgpYBbBkyZJRypAkjWhPobApyRlVtX64MckKYPOTeM7jgLuHtrcCL66qi5v+Xw/cP10gAFTVamA1wOTkZE13jCRp3+wpFN4MXJPkHAbfnwAwCbwE6GwhuKrWdNW3JGnPZlxTqKr/A/wE8BVgafP4CvDc5gNs++oe4Pih7Wc0bZKknu3xcwpV9T3gT+f4OW8ATk5yIoMwOBd49Ww6SLISWHnSSSfNcWmSdHDbly/ZGVmStcD1wKlJtia5sKp2ARcD1wK3A1dV1W2z6beq1lXVqomJibkvWpIOYqN8onmfVdV5M7SvB9ZPt0+S1J9ORwpdSbIyyert27f3XYokHVD2GgpJbkny91Me1yX5/SRPm48ip3L6SJK6Mcr00eeAx4FPN9vnAkcC32bwieWVnVQmSZp3o4TCK6rqBUPbtyS5qapekOS1XRUmSZp/o6wpLEhy2u6NJC8CFjSbuzqpai9cU5CkbowSCm8APp7kn5JsAT4OvCHJUcDvdVncTFxTkKRu7HX6qKpuAH4iyUSzPfz23NtcS9IBZJSrjyaSfAD4IvDFJO/fHRCSpAPLKNNHVwA7gXOaxw7m/tYXkqT9wChXH/1YVf3y0PZ7knyto3pG4r2PJKkbo4wUHk3y0t0bSX4SeLS7kvbOhWZJ6sYoI4WLgD8bWkd4CLigu5IkSX0Z5eqjm4HnJVnUbO9I8mbg7zuuTZI0z0a+IV5V7aiqHc3mWzuqR5LUo329S2rmtApJ0n5hX0Oh5rSKWfI2F5LUjRlDIcnOJDumeewEjp3HGn+AVx9JUjdmXGiuqmPmsxBJUv/G8pvXJEndMBQkSS1DQZLUMhQkSa2xDAUvSZWkboxlKHhJqiR1YyxDQZLUDUNBktQyFCRJLUNBktQyFCRJLUNBktQyFCRJrbEMBT+8JkndGMtQ8MNrktSNsQwFSVI3DAVJUstQkCS1DAVJUstQkCS1DAVJUstQkCS1DAVJUstQkCS1DAVJUmssQ8F7H0lSN8YyFLz3kSR1YyxDQZLUDUNBktQyFCRJLUNBktQyFCRJLUNBktQyFCRJLUNBktQyFCRJLUNBktQyFCRJLUNBktQyFCRJLUNBktQyFCRJLUNBktQyFCRJrf0mFJI8O8lHkvxFkjf1XY8kHYw6DYUkVyTZluTWKe3Lk9yRZFOSSwGq6vaqugg4B/jJLuuSJE2v65HCGmD5cEOSBcDlwApgGXBekmXNvl8ArgHWd1yXJGkanYZCVW0AHpzSfBqwqao2V9X3gSuBs5rjr66qFcBrZuozyaokG5NsvO+++7oqXZIOSof28JzHAXcPbW8FXpzkdOBs4HD2MFKoqtXAaoDJycnqrEpJOgj1EQrTqqovA1/uuQxJOqj1cfXRPcDxQ9vPaNpGlmRlktXbt2+f08Ik6WDXRyjcAJyc5MQkhwHnAlfPpoOqWldVqyYmJjopUJIOVl1fkroWuB44NcnWJBdW1S7gYuBa4Hbgqqq6rcs6JEmj6XRNoarOm6F9PV52Kkn7nf3mE82z4ZqCJHVjLEPBNQVJ6sZYhoIkqRuGgiSpNZah4JqCJHVjLEPBNQVJ6sZYhoIkqRuGgiSpZShIklpjGQouNEtSN8YyFFxolqRujGUoSJK6YShIklqGgiSpNZah4EKzJHVjv/mO5tmoqnXAusnJyTf2XYuk8bH00mv6LmHObHnfqzrpdyxHCpKkbhgKkqSWoSBJahkKkqSWoSBJao1lKHhJqiR1YyxDwXsfSVI3xjIUJEndMBQkSa1UVd817LMk9wF39l3HXiwG7u+7CEnzZhz+zZ9QVU+fbsdYh8I4SLKxqib7rkPS/Bj3f/NOH0mSWoaCJKllKHRvdd8FSJpXY/1v3jUFSVLLkYIkqWUoSJJahkJHkixPckeSTUku7bseSd1KckWSbUlu7buWJ8NQ6ECSBcDlwApgGXBekmX9ViWpY2uA5X0X8WQZCt04DdhUVZur6vvAlcBZPdckqUNVtQF4sO86nixDoRvHAXcPbW9t2iRpv2YoSJJahkI37gGOH9p+RtMmSfs1Q6EbNwAnJzkxyWHAucDVPdckSXtlKHSgqnYBFwPXArcDV1XVbf1WJalLSdYC1wOnJtma5MK+a9oX3uZCktRypCBJahkKkqSWoSBJahkKkqSWoSBJahkK0oiS/EiSK5N8I8mNSdYnOWXc74opDTu07wKkcZAkwH8DPlFV5zZtzwN+uNfCpDnmSEEazc8Aj1XVR3Y3VNXNDN34MMnSJNclual5/Kum/UeTbEjytSS3JvmpJAuSrGm2b0nylvn/kaQf5EhBGs1zgBv3csw24Oeq6r
tJTgbWApPAq4Frq+q9zXdtHAk8Hziuqp4DkOQpXRUuzYahIM2dhcCHkzwfeBw4pWm/AbgiyULgr6rqa0k2A89M8iHgGuBv+ihYmsrpI2k0twEv3MsxbwHuBZ7HYIRwGLRfvvIyBnfKXZPk/Kp6qDnuy8BFwMe6KVuaHUNBGs2XgMOTrNrdkOS5/PNbpE8A36qqJ4DXAQua404A7q2qP2Hw4v+CJIuBQ6rqM8BlwAvm58eQ9szpI2kEVVVJfgn4YJJLgO8CW4A3Dx32R8BnkpwPfB54pGk/HXhHkseAh4HzGXwT358m2f3G7J1d/wzSKLxLqiSp5fSRJKllKEiSWoaCJKllKEiSWoaCJKllKEiSWoaCJKn1/wAv2shaiu0DZwAAAABJRU5ErkJggg==\n", - "text/plain": [ - "
" - ] - }, - "metadata": { - "needs_background": "light" - }, - "output_type": "display_data" - } - ], - "source": [ - "f = plt.figure()\n", - "ax = f.add_subplot(1,1,1)\n", - "ax.hist(data['Class'], bins = [-0.1, 0.1, 0.9, 1.1], log=True)\n", - "ax.set_ylabel(\"Log Counts\")\n", - "ax.set_xticks([0, 1])\n", - "ax.set_xticklabels([\"0\", \"1\"])\n", - "ax.set_xlabel(\"Class\")\n", - "ax.set_title(\"Distribution of Classes\")" - ] - }, - { - "cell_type": "markdown", - "id": "exact-timeline", - "metadata": {}, - "source": [ - "### 2) Create a JSON-AI default object\n", - "We will now create JSON-AI syntax for our problem based on its specifications. We can do so by setting up a ``ProblemDefinition``. The ``ProblemDefinition`` allows us to specify the target, the column we intend to predict, along with other details. \n", - "\n", - "The end goal of JSON-AI is to provide **a set of instructions on how to compile a machine learning pipeline*.\n", - "\n", - "Our target here is called \"**Class**\", which indicates \"0\" for no fraud and \"1\" for fraud. We'll generate the JSON-AI with the minimal syntax:" - ] - }, - { - "cell_type": "code", - "execution_count": 4, - "id": "medieval-zambia", - "metadata": {}, - "outputs": [ - { - "name": "stderr", - "output_type": "stream", - "text": [ - "INFO:lightwood-51500:Dropping features: []\n", - "INFO:lightwood-51500:Analyzing a sample of 18424\n", - "INFO:lightwood-51500:from a total population of 284807, this is equivalent to 6.5% of your data.\n", - "INFO:lightwood-51500:Using 15 processes to deduct types.\n", - "INFO:lightwood-51500:Infering type for: Time\n", - "INFO:lightwood-51500:Infering type for: V1\n", - "INFO:lightwood-51500:Infering type for: V2\n", - "INFO:lightwood-51500:Infering type for: V3\n", - "INFO:lightwood-51500:Infering type for: V4\n", - "INFO:lightwood-51500:Infering type for: V5\n", - "INFO:lightwood-51500:Infering type for: V6\n", - "INFO:lightwood-51500:Infering type for: V7\n", - "INFO:lightwood-51500:Infering type for: V8\n", - "INFO:lightwood-51500:Infering type for: V9\n", - "INFO:lightwood-51500:Infering type for: V10\n", - "INFO:lightwood-51500:Infering type for: V11\n", - "INFO:lightwood-51500:Infering type for: V12\n", - "INFO:lightwood-51500:Infering type for: V13\n", - "INFO:lightwood-51500:Infering type for: V14\n", - "INFO:lightwood-51500:Column Time has data type integer\n", - "INFO:lightwood-51500:Infering type for: V15\n", - "INFO:lightwood-51500:Column V4 has data type float\n", - "INFO:lightwood-51500:Infering type for: V16\n", - "INFO:lightwood-51500:Column V2 has data type float\n", - "INFO:lightwood-51500:Infering type for: V17\n", - "INFO:lightwood-51500:Column V3 has data type float\n", - "INFO:lightwood-51500:Column V1 has data type float\n", - "INFO:lightwood-51500:Infering type for: V18\n", - "INFO:lightwood-51500:Infering type for: V19\n", - "INFO:lightwood-51500:Column V6 has data type float\n", - "INFO:lightwood-51500:Column V5 has data type float\n", - "INFO:lightwood-51500:Infering type for: V20\n", - "INFO:lightwood-51500:Column V7 has data type float\n", - "INFO:lightwood-51500:Infering type for: V21\n", - "INFO:lightwood-51500:Column V8 has data type float\n", - "INFO:lightwood-51500:Infering type for: V22\n", - "INFO:lightwood-51500:Infering type for: V23\n", - "INFO:lightwood-51500:Column V9 has data type float\n", - "INFO:lightwood-51500:Infering type for: V24\n", - "INFO:lightwood-51500:Column V10 has data type float\n", - "INFO:lightwood-51500:Column V13 has data type float\n", - 
"INFO:lightwood-51500:Column V12 has data type float\n", - "INFO:lightwood-51500:Infering type for: V25\n", - "INFO:lightwood-51500:Column V11 has data type float\n", - "INFO:lightwood-51500:Infering type for: V26\n", - "INFO:lightwood-51500:Column V14 has data type float\n", - "INFO:lightwood-51500:Infering type for: V28\n", - "INFO:lightwood-51500:Infering type for: V27\n", - "INFO:lightwood-51500:Infering type for: Amount\n", - "INFO:lightwood-51500:Column V15 has data type float\n", - "INFO:lightwood-51500:Infering type for: Class\n", - "INFO:lightwood-51500:Column V16 has data type float\n", - "INFO:lightwood-51500:Column V17 has data type float\n", - "INFO:lightwood-51500:Column Class has data type binary\n", - "INFO:lightwood-51500:Column Amount has data type float\n", - "INFO:lightwood-51500:Column V23 has data type float\n", - "INFO:lightwood-51500:Column V18 has data type float\n", - "INFO:lightwood-51500:Column V19 has data type float\n", - "INFO:lightwood-51500:Column V20 has data type float\n", - "INFO:lightwood-51500:Column V28 has data type float\n", - "INFO:lightwood-51500:Column V21 has data type float\n", - "INFO:lightwood-51500:Column V22 has data type float\n", - "INFO:lightwood-51500:Column V26 has data type float\n", - "INFO:lightwood-51500:Column V24 has data type float\n", - "INFO:lightwood-51500:Column V25 has data type float\n", - "INFO:lightwood-51500:Column V27 has data type float\n", - "INFO:lightwood-51500:Starting statistical analysis\n", - "INFO:lightwood-51500:Finished statistical analysis\n" - ] - }, - { - "name": "stdout", - "output_type": "stream", - "text": [ - "MyCustomCleaner.py\n", - "MyCustomCleaner\n", - "MyCustomSplitter.py\n", - "MyCustomSplitter\n" - ] - } - ], - "source": [ - "# Setup the problem definition\n", - "problem_definition = {\n", - " 'target': 'Class',\n", - "}\n", - "\n", - "# Generate the j{ai}son syntax\n", - "default_json = json_ai_from_problem(data, problem_definition)\n" - ] - }, - { - "cell_type": "markdown", - "id": "deadly-rotation", - "metadata": {}, - "source": [ - "Lightwood looks at each of the many columns and indicates they are mostly float, with exception of \"**Class**\" which is binary.\n", - "\n", - "You can observe the JSON-AI if you run the command `print(default_json.to_json())`. Given there are many input features, we won't print it out." - ] - }, - { - "cell_type": "markdown", - "id": "immune-clone", - "metadata": {}, - "source": [ - "These are the only elements required to get off the ground with JSON-AI. However, we're interested in making a *custom* approach. So, let's make this syntax a file, and introduce our own changes." - ] - }, - { - "cell_type": "code", - "execution_count": 5, - "id": "owned-translator", - "metadata": {}, - "outputs": [], - "source": [ - "with open(\"default.json\", \"w\") as fp:\n", - " fp.write(default_json.to_json())" - ] - }, - { - "cell_type": "markdown", - "id": "massive-divide", - "metadata": {}, - "source": [ - "### 3) Build your own splitter module\n", - "\n", - "For Lightwood, the goal of a splitter is to intake an initial dataset (pre-processed ideally, although you can run the pre-processor on each DataFrame within the splitter) and return a dictionary with the keys \"train\", \"test\", and \"dev\" (at minimum). Subsequent steps of the pipeline expect the keys \"train\", \"test\", and \"dev\", so it's important you assign datasets to these as necessary. \n", - "\n", - "We're going to introduce SMOTE sampling in our splitter. 
SMOTE allows you to quickly learn an approximation to make extra \"samples\" that mimic the undersampled class. \n", - "\n", - "We will use the package `imblearn` and `scikit-learn` to quickly create a train/test split and apply SMOTE to our training data only.\n", - "\n", - "**NOTE** This is simply an example of things you can do with the splitter; whether SMOTE sampling is ideal for your problem depends on the question you're trying to answer!" - ] - }, - { - "cell_type": "markdown", - "id": "comparable-diameter", - "metadata": {}, - "source": [ - "```\n", - "from lightwood.api.dtype import dtype\n", - "import pandas as pd\n", - "import numpy as np\n", - "from typing import List, Dict\n", - "from itertools import product\n", - "from lightwood.api.types import TimeseriesSettings\n", - "from lightwood.helpers.log import log\n", - "\n", - "\n", - "from imblearn.over_sampling import SMOTE\n", - "from sklearn.model_selection import train_test_split\n", - "\n", - "\n", - "def MySplitter(\n", - " data: pd.DataFrame,\n", - " target: str,\n", - " pct_train: float = 0.8,\n", - " pct_dev: float = 0.1,\n", - " seed: int = 1,\n", - ") -> Dict[str, pd.DataFrame]:\n", - " \"\"\"\n", - " Custom splitting function\n", - "\n", - "\n", - " :param data: Input data\n", - " :param target: Name of the target\n", - " :param pct_train: Percentage of data reserved for training, taken out of full data\n", - " :param pct_dev: Percentage of data reserved for dev, taken out of train data\n", - " :param seed: Random seed for reproducibility\n", - "\n", - " :returns: A dictionary containing the keys train, test and dev with their respective data frames.\n", - " \"\"\"\n", - "\n", - " # Shuffle the data\n", - " data = data.sample(frac=1, random_state=seed).reset_index(drop=True)\n", - "\n", - " # Split into feature columns + target\n", - " X = data.iloc[:, data.columns != target] # .values\n", - " y = data[target] # .values\n", - "\n", - " # Create a train/test split\n", - " X_train, X_test, y_train, y_test = train_test_split(\n", - " X, y, train_size=pct_train, random_state=seed, stratify=data[target]\n", - " )\n", - "\n", - " X_train, X_dev, y_train, y_dev = train_test_split(\n", - " X, y, test_size=pct_dev, random_state=seed, stratify=y_train\n", - " )\n", - "\n", - " # Create a SMOTE model and bump up underbalanced class JUST for train data\n", - " SMOTE_model = SMOTE(random_state=seed)\n", - "\n", - " Xtrain_mod, ytrain_mod = SMOTE_model.fit_resample(X_train, y_train.ravel())\n", - "\n", - " Xtrain_mod[target] = ytrain_mod\n", - " X_test[target] = y_test\n", - " X_dev[target] = y_dev\n", - "\n", - " return {\"train\": Xtrain_mod, \"test\": X_test, \"dev\": X_dev}\n", - "\n", - "```" - ] - }, - { - "cell_type": "markdown", - "id": "analyzed-radical", - "metadata": {}, - "source": [ - "#### Place your custom module in `~/lightwood_modules`\n", - "\n", - "We automatically search for custom scripts in your `~/lightwood_modules` path. Place your file there. Later, you'll see when we autogenerate code, that you can change your import location if you choose." - ] - }, - { - "cell_type": "markdown", - "id": "lucky-blair", - "metadata": {}, - "source": [ - "### 4) Introduce your custom splitter in JSON-AI\n", - "\n", - "Now let's introduce our custom splitter. JSON-AI keeps a lightweight syntax but fills in many default modules (like splitting, cleaning).\n", - "\n", - "For the custom cleaner, we'll work by editing the \"splitter\" key. 
We will change properties within it as follows:\n", - "(1) \"module\" - place the name of the function. In our case it will be \"MyCustomCleaner.cleaner\"\n", - "(2) \"args\" - any keyword argument specific to your cleaner's internals. \n", - "\n", - "This will look as follows:\n", - "```\n", - " \"splitter\": {\n", - " \"module\": \"MyCustomSplitter.MySplitter\",\n", - " \"args\": {\n", - " \"data\": \"data\",\n", - " \"target\": \"$target\",\n", - " \"pct_train\": 0.8,\n", - " \"pct_dev\": 0.1,\n", - " \"seed\": 1\n", - " }\n", - " },\n", - "```\n", - "\n", - "Let's copy our file `default.json` into `custom.json` and add this block. Then, we can proceed as usual to create python code." - ] - }, - { - "cell_type": "markdown", - "id": "identical-georgia", - "metadata": {}, - "source": [ - "### 5) Generate Python code representing your ML pipeline\n", - "\n", - "Now we're ready to load up our custom JSON-AI and generate the predictor code!\n", - "\n", - "We can do this by first reading in our custom json-syntax, and then calling the function `code_from_json_ai`. " - ] - }, - { - "cell_type": "code", - "execution_count": 6, - "id": "alleged-concentrate", - "metadata": {}, - "outputs": [ - { - "name": "stdout", - "output_type": "stream", - "text": [ - "MyCustomCleaner.py\n", - "MyCustomCleaner\n", - "MyCustomSplitter.py\n", - "MyCustomSplitter\n", - "import lightwood\n", - "from lightwood.analysis import *\n", - "from lightwood.api import *\n", - "from lightwood.data import *\n", - "from lightwood.encoder import *\n", - "from lightwood.ensemble import *\n", - "from lightwood.helpers.device import *\n", - "from lightwood.helpers.general import *\n", - "from lightwood.helpers.log import *\n", - "from lightwood.helpers.numeric import *\n", - "from lightwood.helpers.parallelism import *\n", - "from lightwood.helpers.seed import *\n", - "from lightwood.helpers.text import *\n", - "from lightwood.helpers.torch import *\n", - "from lightwood.mixer import *\n", - "import pandas as pd\n", - "from typing import Dict, List\n", - "import os\n", - "from types import ModuleType\n", - "import importlib.machinery\n", - "import sys\n", - "\n", - "\n", - "for import_dir in [os.path.expanduser(\"~/lightwood_modules\"), \"/etc/lightwood_modules\"]:\n", - " if os.path.exists(import_dir) and os.access(import_dir, os.R_OK):\n", - " for file_name in list(os.walk(import_dir))[0][2]:\n", - " print(file_name)\n", - " if file_name[-3:] != \".py\":\n", - " continue\n", - " mod_name = file_name[:-3]\n", - " print(mod_name)\n", - " loader = importlib.machinery.SourceFileLoader(\n", - " mod_name, os.path.join(import_dir, file_name)\n", - " )\n", - " module = ModuleType(loader.name)\n", - " loader.exec_module(module)\n", - " sys.modules[mod_name] = module\n", - " exec(f\"import {mod_name}\")\n", - "\n", - "\n", - "class Predictor(PredictorInterface):\n", - " target: str\n", - " mixers: List[BaseMixer]\n", - " encoders: Dict[str, BaseEncoder]\n", - " ensemble: BaseEnsemble\n", - " mode: str\n", - "\n", - " def __init__(self):\n", - " seed(420)\n", - " self.target = \"Class\"\n", - " self.mode = \"inactive\"\n", - " self.problem_definition = ProblemDefinition.from_dict(\n", - " {\n", - " \"target\": \"Class\",\n", - " \"pct_invalid\": 2,\n", - " \"unbias_target\": True,\n", - " \"seconds_per_mixer\": 14354,\n", - " \"seconds_per_encoder\": 0,\n", - " \"time_aim\": 64593.50573948541,\n", - " \"target_weights\": None,\n", - " \"positive_domain\": False,\n", - " \"timeseries_settings\": {\n", - " \"is_timeseries\": False,\n", - " 
\"order_by\": None,\n", - " \"window\": None,\n", - " \"group_by\": None,\n", - " \"use_previous_target\": True,\n", - " \"nr_predictions\": None,\n", - " \"historical_columns\": None,\n", - " \"target_type\": \"\",\n", - " \"allow_incomplete_history\": False,\n", - " },\n", - " \"anomaly_detection\": True,\n", - " \"ignore_features\": [],\n", - " \"fit_on_all\": True,\n", - " \"strict_mode\": True,\n", - " \"seed_nr\": 420,\n", - " }\n", - " )\n", - " self.accuracy_functions = [\"balanced_accuracy_score\"]\n", - " self.identifiers = {}\n", - " self.dtype_dict = {\n", - " \"Class\": \"binary\",\n", - " \"Time\": \"integer\",\n", - " \"V1\": \"float\",\n", - " \"V2\": \"float\",\n", - " \"V3\": \"float\",\n", - " \"V4\": \"float\",\n", - " \"V5\": \"float\",\n", - " \"V6\": \"float\",\n", - " \"V7\": \"float\",\n", - " \"V8\": \"float\",\n", - " \"V9\": \"float\",\n", - " \"V10\": \"float\",\n", - " \"V11\": \"float\",\n", - " \"V12\": \"float\",\n", - " \"V13\": \"float\",\n", - " \"V14\": \"float\",\n", - " \"V15\": \"float\",\n", - " \"V16\": \"float\",\n", - " \"V17\": \"float\",\n", - " \"V18\": \"float\",\n", - " \"V19\": \"float\",\n", - " \"V20\": \"float\",\n", - " \"V21\": \"float\",\n", - " \"V22\": \"float\",\n", - " \"V23\": \"float\",\n", - " \"V24\": \"float\",\n", - " \"V25\": \"float\",\n", - " \"V26\": \"float\",\n", - " \"V27\": \"float\",\n", - " \"V28\": \"float\",\n", - " \"Amount\": \"float\",\n", - " }\n", - "\n", - " # Any feature-column dependencies\n", - " self.dependencies = {\n", - " \"Time\": [],\n", - " \"V1\": [],\n", - " \"V2\": [],\n", - " \"V3\": [],\n", - " \"V4\": [],\n", - " \"V5\": [],\n", - " \"V6\": [],\n", - " \"V7\": [],\n", - " \"V8\": [],\n", - " \"V9\": [],\n", - " \"V10\": [],\n", - " \"V11\": [],\n", - " \"V12\": [],\n", - " \"V13\": [],\n", - " \"V14\": [],\n", - " \"V15\": [],\n", - " \"V16\": [],\n", - " \"V17\": [],\n", - " \"V18\": [],\n", - " \"V19\": [],\n", - " \"V20\": [],\n", - " \"V21\": [],\n", - " \"V22\": [],\n", - " \"V23\": [],\n", - " \"V24\": [],\n", - " \"V25\": [],\n", - " \"V26\": [],\n", - " \"V27\": [],\n", - " \"V28\": [],\n", - " \"Amount\": [],\n", - " }\n", - "\n", - " self.input_cols = [\n", - " \"Time\",\n", - " \"V1\",\n", - " \"V2\",\n", - " \"V3\",\n", - " \"V4\",\n", - " \"V5\",\n", - " \"V6\",\n", - " \"V7\",\n", - " \"V8\",\n", - " \"V9\",\n", - " \"V10\",\n", - " \"V11\",\n", - " \"V12\",\n", - " \"V13\",\n", - " \"V14\",\n", - " \"V15\",\n", - " \"V16\",\n", - " \"V17\",\n", - " \"V18\",\n", - " \"V19\",\n", - " \"V20\",\n", - " \"V21\",\n", - " \"V22\",\n", - " \"V23\",\n", - " \"V24\",\n", - " \"V25\",\n", - " \"V26\",\n", - " \"V27\",\n", - " \"V28\",\n", - " \"Amount\",\n", - " ]\n", - "\n", - " # Initial stats analysis\n", - " self.statistical_analysis = None\n", - "\n", - " def analyze_data(self, data: pd.DataFrame) -> None:\n", - " # Perform a statistical analysis on the unprocessed data\n", - "\n", - " log.info(\"Performing statistical analysis on data\")\n", - " self.statistical_analysis = lightwood.data.statistical_analysis(\n", - " data, self.dtype_dict, {}, self.problem_definition\n", - " )\n", - "\n", - " # Instantiate post-training evaluation\n", - " self.analysis_blocks = [\n", - " ICP(\n", - " fixed_significance=None,\n", - " confidence_normalizer=False,\n", - " positive_domain=self.statistical_analysis.positive_domain,\n", - " ),\n", - " AccStats(deps=[\"ICP\"]),\n", - " GlobalFeatureImportance(disable_column_importance=False),\n", - " ]\n", - "\n", - " def preprocess(self, data: 
pd.DataFrame) -> pd.DataFrame:\n", - " # Preprocess and clean data\n", - "\n", - " log.info(\"Cleaning the data\")\n", - " data = cleaner(\n", - " data=data,\n", - " pct_invalid=self.problem_definition.pct_invalid,\n", - " identifiers=self.identifiers,\n", - " dtype_dict=self.dtype_dict,\n", - " target=self.target,\n", - " mode=self.mode,\n", - " timeseries_settings=self.problem_definition.timeseries_settings,\n", - " anomaly_detection=self.problem_definition.anomaly_detection,\n", - " )\n", - "\n", - " # Time-series blocks\n", - "\n", - " return data\n", - "\n", - " def split(self, data: pd.DataFrame) -> Dict[str, pd.DataFrame]:\n", - " # Split the data into training/testing splits\n", - "\n", - " log.info(\"Splitting the data into train/test\")\n", - " train_test_data = MyCustomSplitter.MySplitter(\n", - " data=data, pct_train=0.8, pct_dev=0.1, seed=1, target=self.target\n", - " )\n", - "\n", - " return train_test_data\n", - "\n", - " def prepare(self, data: Dict[str, pd.DataFrame]) -> None:\n", - " # Prepare encoders to featurize data\n", - "\n", - " self.mode = \"train\"\n", - "\n", - " if self.statistical_analysis is None:\n", - " raise Exception(\"Please run analyze_data first\")\n", - "\n", - " # Column to encoder mapping\n", - " self.encoders = {\n", - " \"Class\": Binary.BinaryEncoder(\n", - " is_target=True,\n", - " target_class_distribution=self.statistical_analysis.target_class_distribution,\n", - " ),\n", - " \"Time\": Integer.NumericEncoder(),\n", - " \"V1\": Float.NumericEncoder(),\n", - " \"V2\": Float.NumericEncoder(),\n", - " \"V3\": Float.NumericEncoder(),\n", - " \"V4\": Float.NumericEncoder(),\n", - " \"V5\": Float.NumericEncoder(),\n", - " \"V6\": Float.NumericEncoder(),\n", - " \"V7\": Float.NumericEncoder(),\n", - " \"V8\": Float.NumericEncoder(),\n", - " \"V9\": Float.NumericEncoder(),\n", - " \"V10\": Float.NumericEncoder(),\n", - " \"V11\": Float.NumericEncoder(),\n", - " \"V12\": Float.NumericEncoder(),\n", - " \"V13\": Float.NumericEncoder(),\n", - " \"V14\": Float.NumericEncoder(),\n", - " \"V15\": Float.NumericEncoder(),\n", - " \"V16\": Float.NumericEncoder(),\n", - " \"V17\": Float.NumericEncoder(),\n", - " \"V18\": Float.NumericEncoder(),\n", - " \"V19\": Float.NumericEncoder(),\n", - " \"V20\": Float.NumericEncoder(),\n", - " \"V21\": Float.NumericEncoder(),\n", - " \"V22\": Float.NumericEncoder(),\n", - " \"V23\": Float.NumericEncoder(),\n", - " \"V24\": Float.NumericEncoder(),\n", - " \"V25\": Float.NumericEncoder(),\n", - " \"V26\": Float.NumericEncoder(),\n", - " \"V27\": Float.NumericEncoder(),\n", - " \"V28\": Float.NumericEncoder(),\n", - " \"Amount\": Float.NumericEncoder(),\n", - " }\n", - "\n", - " # Prepare the training + dev data\n", - " concatenated_train_dev = pd.concat([data[\"train\"], data[\"dev\"]])\n", - "\n", - " log.info(\"Preparing the encoders\")\n", - "\n", - " encoder_prepping_dict = {}\n", - "\n", - " # Prepare encoders that do not require learned strategies\n", - " for col_name, encoder in self.encoders.items():\n", - " if not encoder.is_trainable_encoder:\n", - " encoder_prepping_dict[col_name] = [\n", - " encoder,\n", - " concatenated_train_dev[col_name],\n", - " \"prepare\",\n", - " ]\n", - " log.info(\n", - " f\"Encoder prepping dict length of: {len(encoder_prepping_dict)}\"\n", - " )\n", - "\n", - " # Setup parallelization\n", - " parallel_prepped_encoders = mut_method_call(encoder_prepping_dict)\n", - " for col_name, encoder in parallel_prepped_encoders.items():\n", - " self.encoders[col_name] = encoder\n", - "\n", - " # 
Prepare the target\n", - " if self.target not in parallel_prepped_encoders:\n", - " if self.encoders[self.target].is_trainable_encoder:\n", - " self.encoders[self.target].prepare(\n", - " data[\"train\"][self.target], data[\"dev\"][self.target]\n", - " )\n", - " else:\n", - " self.encoders[self.target].prepare(\n", - " pd.concat([data[\"train\"], data[\"dev\"]])[self.target]\n", - " )\n", - "\n", - " # Prepare any non-target encoders that are learned\n", - " for col_name, encoder in self.encoders.items():\n", - " if encoder.is_trainable_encoder:\n", - " priming_data = pd.concat([data[\"train\"], data[\"dev\"]])\n", - " kwargs = {}\n", - " if self.dependencies[col_name]:\n", - " kwargs[\"dependency_data\"] = {}\n", - " for col in self.dependencies[col_name]:\n", - " kwargs[\"dependency_data\"][col] = {\n", - " \"original_type\": self.dtype_dict[col],\n", - " \"data\": priming_data[col],\n", - " }\n", - "\n", - " # If an encoder representation requires the target, provide priming data\n", - " if hasattr(encoder, \"uses_target\"):\n", - " kwargs[\"encoded_target_values\"] = parallel_prepped_encoders[\n", - " self.target\n", - " ].encode(priming_data[self.target])\n", - "\n", - " encoder.prepare(\n", - " data[\"train\"][col_name], data[\"dev\"][col_name], **kwargs\n", - " )\n", - "\n", - " def featurize(self, split_data: Dict[str, pd.DataFrame]):\n", - " # Featurize data into numerical representations for models\n", - "\n", - " log.info(\"Featurizing the data\")\n", - " feature_data = {key: None for key in split_data.keys()}\n", - "\n", - " for key, data in split_data.items():\n", - " feature_data[key] = EncodedDs(self.encoders, data, self.target)\n", - "\n", - " return feature_data\n", - "\n", - " def fit(self, enc_data: Dict[str, pd.DataFrame]) -> None:\n", - " # Fit predictors to estimate target\n", - "\n", - " self.mode = \"train\"\n", - "\n", - " # --------------- #\n", - " # Extract data\n", - " # --------------- #\n", - " # Extract the featurized data into train/dev/test\n", - " encoded_train_data = enc_data[\"train\"]\n", - " encoded_dev_data = enc_data[\"dev\"]\n", - " encoded_test_data = enc_data[\"test\"]\n", - "\n", - " log.info(\"Training the mixers\")\n", - "\n", - " # --------------- #\n", - " # Fit Models\n", - " # --------------- #\n", - " # Assign list of mixers\n", - " self.mixers = [\n", - " Neural(\n", - " fit_on_dev=True,\n", - " search_hyperparameters=True,\n", - " net=\"DefaultNet\",\n", - " stop_after=self.problem_definition.seconds_per_mixer,\n", - " target_encoder=self.encoders[self.target],\n", - " target=self.target,\n", - " dtype_dict=self.dtype_dict,\n", - " input_cols=self.input_cols,\n", - " timeseries_settings=self.problem_definition.timeseries_settings,\n", - " ),\n", - " LightGBM(\n", - " fit_on_dev=True,\n", - " stop_after=self.problem_definition.seconds_per_mixer,\n", - " target=self.target,\n", - " dtype_dict=self.dtype_dict,\n", - " input_cols=self.input_cols,\n", - " ),\n", - " Regression(\n", - " stop_after=self.problem_definition.seconds_per_mixer,\n", - " target=self.target,\n", - " dtype_dict=self.dtype_dict,\n", - " target_encoder=self.encoders[self.target],\n", - " ),\n", - " ]\n", - "\n", - " # Train mixers\n", - " trained_mixers = []\n", - " for mixer in self.mixers:\n", - " try:\n", - " mixer.fit(encoded_train_data, encoded_dev_data)\n", - " trained_mixers.append(mixer)\n", - " except Exception as e:\n", - " log.warning(f\"Exception: {e} when training mixer: {mixer}\")\n", - " if True and mixer.stable:\n", - " raise e\n", - "\n", - " # Update 
mixers to trained versions\n", - " self.mixers = trained_mixers\n", - "\n", - " # --------------- #\n", - " # Create Ensembles\n", - " # --------------- #\n", - " log.info(\"Ensembling the mixer\")\n", - " # Create an ensemble of mixers to identify best performing model\n", - " self.pred_args = PredictionArguments()\n", - " self.ensemble = BestOf(\n", - " ts_analysis=None,\n", - " data=encoded_test_data,\n", - " accuracy_functions=self.accuracy_functions,\n", - " target=self.target,\n", - " mixers=self.mixers,\n", - " )\n", - " self.supports_proba = self.ensemble.supports_proba\n", - "\n", - " def analyze_ensemble(self, enc_data: Dict[str, pd.DataFrame]) -> None:\n", - " # Evaluate quality of fit for the ensemble of mixers\n", - "\n", - " # --------------- #\n", - " # Extract data\n", - " # --------------- #\n", - " # Extract the featurized data into train/dev/test\n", - " encoded_train_data = enc_data[\"train\"]\n", - " encoded_dev_data = enc_data[\"dev\"]\n", - " encoded_test_data = enc_data[\"test\"]\n", - "\n", - " # --------------- #\n", - " # Analyze Ensembles\n", - " # --------------- #\n", - " log.info(\"Analyzing the ensemble of mixers\")\n", - " self.model_analysis, self.runtime_analyzer = model_analyzer(\n", - " data=encoded_test_data,\n", - " train_data=encoded_train_data,\n", - " stats_info=self.statistical_analysis,\n", - " ts_cfg=self.problem_definition.timeseries_settings,\n", - " accuracy_functions=self.accuracy_functions,\n", - " predictor=self.ensemble,\n", - " target=self.target,\n", - " dtype_dict=self.dtype_dict,\n", - " analysis_blocks=self.analysis_blocks,\n", - " )\n", - "\n", - " def learn(self, data: pd.DataFrame) -> None:\n", - " log.info(f\"Dropping features: {self.problem_definition.ignore_features}\")\n", - " data = data.drop(\n", - " columns=self.problem_definition.ignore_features, errors=\"ignore\"\n", - " )\n", - "\n", - " self.mode = \"train\"\n", - "\n", - " # Perform stats analysis\n", - " self.analyze_data(data)\n", - "\n", - " # Pre-process the data\n", - " clean_data = self.preprocess(data)\n", - "\n", - " # Create train/test (dev) split\n", - " train_dev_test = self.split(clean_data)\n", - "\n", - " # Prepare encoders\n", - " self.prepare(train_dev_test)\n", - "\n", - " # Create feature vectors from data\n", - " enc_train_test = self.featurize(train_dev_test)\n", - "\n", - " # Prepare mixers\n", - " self.fit(enc_train_test)\n", - "\n", - " # Analyze the ensemble\n", - " self.analyze_ensemble(enc_train_test)\n", - "\n", - " # ------------------------ #\n", - " # Enable model partial fit AFTER it is trained and evaluated for performance with the appropriate train/dev/test splits.\n", - " # This assumes the predictor could continuously evolve, hence including reserved testing data may improve predictions.\n", - " # SET `json_ai.problem_definition.fit_on_all=False` TO TURN THIS BLOCK OFF.\n", - "\n", - " # Update the mixers with partial fit\n", - " if self.problem_definition.fit_on_all:\n", - "\n", - " log.info(\"Adjustment on validation requested.\")\n", - " update_data = {\n", - " \"new\": enc_train_test[\"test\"],\n", - " \"old\": ConcatedEncodedDs(\n", - " [enc_train_test[\"train\"], enc_train_test[\"dev\"]]\n", - " ),\n", - " } # noqa\n", - "\n", - " self.adjust(update_data)\n", - "\n", - " def adjust(self, new_data: Dict[str, pd.DataFrame]) -> None:\n", - " # Update mixers with new information\n", - "\n", - " self.mode = \"train\"\n", - "\n", - " # --------------- #\n", - " # Extract data\n", - " # --------------- #\n", - " # Extract the 
featurized data\n", - " encoded_old_data = new_data[\"old\"]\n", - " encoded_new_data = new_data[\"new\"]\n", - "\n", - " # --------------- #\n", - " # Adjust (Update) Mixers\n", - " # --------------- #\n", - " log.info(\"Updating the mixers\")\n", - "\n", - " for mixer in self.mixers:\n", - " mixer.partial_fit(encoded_new_data, encoded_old_data)\n", - "\n", - " def predict(self, data: pd.DataFrame, args: Dict = {}) -> pd.DataFrame:\n", - "\n", - " # Remove columns that user specifies to ignore\n", - " log.info(f\"Dropping features: {self.problem_definition.ignore_features}\")\n", - " data = data.drop(\n", - " columns=self.problem_definition.ignore_features, errors=\"ignore\"\n", - " )\n", - " for col in self.input_cols:\n", - " if col not in data.columns:\n", - " data[col] = [None] * len(data)\n", - "\n", - " # Clean the data\n", - " self.mode = \"predict\"\n", - " log.info(\"Cleaning the data\")\n", - " data = cleaner(\n", - " data=data,\n", - " pct_invalid=self.problem_definition.pct_invalid,\n", - " identifiers=self.identifiers,\n", - " dtype_dict=self.dtype_dict,\n", - " target=self.target,\n", - " mode=self.mode,\n", - " timeseries_settings=self.problem_definition.timeseries_settings,\n", - " anomaly_detection=self.problem_definition.anomaly_detection,\n", - " )\n", - "\n", - " # Featurize the data\n", - " encoded_ds = EncodedDs(self.encoders, data, self.target)\n", - " encoded_data = encoded_ds.get_encoded_data(include_target=False)\n", - "\n", - " self.pred_args = PredictionArguments.from_dict(args)\n", - " df = self.ensemble(encoded_ds, args=self.pred_args)\n", - "\n", - " if self.pred_args.all_mixers:\n", - " return df\n", - " else:\n", - " insights, global_insights = explain(\n", - " data=data,\n", - " encoded_data=encoded_data,\n", - " predictions=df,\n", - " ts_analysis=None,\n", - " timeseries_settings=self.problem_definition.timeseries_settings,\n", - " positive_domain=self.statistical_analysis.positive_domain,\n", - " anomaly_detection=self.problem_definition.anomaly_detection,\n", - " analysis=self.runtime_analyzer,\n", - " target_name=self.target,\n", - " target_dtype=self.dtype_dict[self.target],\n", - " explainer_blocks=self.analysis_blocks,\n", - " fixed_confidence=self.pred_args.fixed_confidence,\n", - " anomaly_error_rate=self.pred_args.anomaly_error_rate,\n", - " anomaly_cooldown=self.pred_args.anomaly_cooldown,\n", - " )\n", - " return insights\n", - "\n" - ] - } - ], - "source": [ - "# Make changes to your JSON-file and load the custom version\n", - "with open('custom.json', 'r') as fp:\n", - " modified_json = JsonAI.from_json(fp.read())\n", - "\n", - "#Generate python code that fills in your pipeline\n", - "code = code_from_json_ai(modified_json)\n", - "\n", - "print(code)\n", - "\n", - "# Save code to a file (Optional)\n", - "with open('custom_splitter_pipeline.py', 'w') as fp:\n", - " fp.write(code)" - ] - }, - { - "cell_type": "markdown", - "id": "dental-beauty", - "metadata": {}, - "source": [ - "As you can see, an end-to-end pipeline of our entire ML procedure has been generating. There are several abstracted functions to enable transparency as to what processes your data goes through in order to build these models.\n", - "\n", - "The key steps of the pipeline are as follows:\n", - "\n", - "(1) Run a **statistical analysis** with `analyze_data`
\n", - "(2) Clean your data with `preprocess`
\n", - "(3) Make a training/dev/testing split with `split`
\n", - "(4) Prepare your feature-engineering pipelines with `prepare`
\n", - "(5) Create your features with `featurize`
\n", - "(6) Fit your predictor models with `fit`
\n", - "\n", - "You can customize this further if necessary, but you have all the steps necessary to train a model!\n", - "\n", - "We recommend familiarizing with these steps by calling the above commands, ideally in order. Some commands (namely `prepare`, `featurize`, and `fit`) do depend on other steps.\n", - "\n", - "If you want to omit the individual steps, we recommend your simply call the `learn` method, which compiles all the necessary steps implemented to give your fully trained predictive models starting with unprocessed data! " - ] - }, - { - "cell_type": "markdown", - "id": "amended-oklahoma", - "metadata": {}, - "source": [ - "### 6) Call python to run your code and see your preprocessed outputs\n", - "\n", - "Once we have code, we can turn this into a python object by calling `predictor_from_code`. This instantiates the `PredictorInterface` object. \n", - "\n", - "This predictor object can be then used to run your pipeline." - ] - }, - { - "cell_type": "code", - "execution_count": 7, - "id": "organic-london", - "metadata": {}, - "outputs": [ - { - "name": "stdout", - "output_type": "stream", - "text": [ - "MyCustomCleaner.py\n", - "MyCustomCleaner\n", - "MyCustomSplitter.py\n", - "MyCustomSplitter\n" - ] - } - ], - "source": [ - "# Turn the code above into a predictor object\n", - "predictor = predictor_from_code(code)" - ] - }, - { - "cell_type": "code", - "execution_count": 8, - "id": "fabulous-prime", - "metadata": {}, - "outputs": [ - { - "name": "stderr", - "output_type": "stream", - "text": [ - "INFO:lightwood-51500:Cleaning the data\n", - "INFO:lightwood-51500:Splitting the data into train/test\n", - "/home/natasha/lightwood_modules/MyCustomSplitter.py:56: SettingWithCopyWarning: \n", - "A value is trying to be set on a copy of a slice from a DataFrame.\n", - "Try using .loc[row_indexer,col_indexer] = value instead\n", - "\n", - "See the caveats in the documentation: https://pandas.pydata.org/pandas-docs/stable/user_guide/indexing.html#returning-a-view-versus-a-copy\n", - " X_test[target] = y_test\n", - "/home/natasha/lightwood_modules/MyCustomSplitter.py:57: SettingWithCopyWarning: \n", - "A value is trying to be set on a copy of a slice from a DataFrame.\n", - "Try using .loc[row_indexer,col_indexer] = value instead\n", - "\n", - "See the caveats in the documentation: https://pandas.pydata.org/pandas-docs/stable/user_guide/indexing.html#returning-a-view-versus-a-copy\n", - " X_dev[target] = y_dev\n" - ] - } - ], - "source": [ - "# Pre-process the data\n", - "cleaned_data = predictor.preprocess(data)\n", - "train_test_data = predictor.split(cleaned_data)" - ] - }, - { - "cell_type": "code", - "execution_count": 9, - "id": "suspended-biography", - "metadata": {}, - "outputs": [ - { - "data": { - "image/png": 
"iVBORw0KGgoAAAANSUhEUgAABP4AAAFVCAYAAAB/4yFKAAAAOXRFWHRTb2Z0d2FyZQBNYXRwbG90bGliIHZlcnNpb24zLjQuMiwgaHR0cHM6Ly9tYXRwbG90bGliLm9yZy8rg+JYAAAACXBIWXMAAAsTAAALEwEAmpwYAAA6fElEQVR4nO3df5yldV3//8dTQAVi11XAH4QCrolQflI3f6QlaqlgG6mY/dAiE0Irv6al5cdvrvTVAgMttXAjw8ofpaG2iSGIkFakYEmIqIkrKv5AXQZ18UfL6/vHdY0chjOzZ2bPzLnOdR732+3cZs51vc91vWZ2mCfzuq7zfqeqkCRJkiRJktQvt5t0AZIkSZIkSZLGz8afJEmSJEmS1EM2/iRJkiRJkqQesvEnSZIkSZIk9ZCNP0mSJEmSJKmHbPxJkiRJkiRJPWTjT+qBJIclqSRbJl2LJEmSJEnqBht/0ipom3CjPg6bdL2SJM1LcsyCnNqVZEeSK5O8Icnjk2TSdUqSZsuk/sZKcmKS547reNJa23vSBUg99fQFz38MOBnYCrx/wb7rx3C+zwD7Av87hmNJkgTwZuA8IMABwH2BnwF+CbgwyVOq6oaJVSdJmjVr/TfWvBOBw4BXjfGY0pqx8Setgqr628HnSfamCaV/X7hvoSQHVNXXl3m+Ar617EIlSVrch4fk2fOA04Hn0TQGj51EYZKk2bMnf2NJs8y3+koTlGR7kouTPCDJ+UnmgCvafQck+f+S/EeSryT5dpL/SfJHSfZbcJzbzPE3uC3JTyX5UJJvJflCkle0QTl4jP2SHJnk7mvxtUuSpk9V7aqq5wMfAB6f5BHz+5KsT3Jam1XfTnJ9kjcnOWJgzLFtNj1n2PGT/Hv7un3MJUnSSqTxrCSXJ9mZ5BtJ3pfkUUPG/lKSDya5Ick3k1yT5I1JDmr3bwceCdxrwVuJj2n3m1XqPBt/0uTdE7iI5u26vwO8ut1+CPBM4DLgD2jurvgw8ALg7cs4/nHA64F3A78FfAT47fY4gx4MfAz4w5V8EZKkmfKX7ccnQNP0A/4NeDbwLuA3gdcAjwb+I8m92vHvAb5I83bhW0lyH+ChwJuq6ruYS5Kklfkbmgz6H5q/eV4CrAcuSPLT84OSPB14A807p34feC7wtzRTWxzcDnsucDXwFZq3Gs8/PtbuN6vUeb7VV5q8w4GTqursBduvAQ5t//iZ99okfwC8OMmDq+qDIxz/aODoqtoOkOQs4L9p/ih7+R5XL0maRVe0H3+g/XgqcATw0Kr6yPygJOfQZM5LgROraleSvwV+O8lRVXXVwDHnm4FvWNXKJUm9leSJwC8Cv1ZVWwe2/wlwKfAnSba1UyU9Efg68OiqGpwr/ffnP6mqd7QLe+zr24k1rbzjT5q8rwF/tXBjVX1nvumXZO8kG5IcCFzYDnnIiMd/x3zTrz1uAe8D7pbk+wa2X1xVqaoTV/ZlSJJmyI3tx3XtCr+/CPwL8PkkB84/gG/S/KH12IHXzjf2vnfXX3uMpwFXVtWHwVySJK3I02iaee9YkEd3ArbRLNJxn3bsHLAf8ISVrlZvVmkaeMefNHmfqqpdw3YkeTZwCs1dewsb9RtGPP41Q7Z9tf14F+AbIx5HkqR569qPNwIH0eTJY1l8FcWb5z+pqiuTfBj4xSQvqqqbgR+n+WNs4TQUkiQtx/1oVqL/0hJj7gp8gubdTz8OvAP4apJLaKZH+rvlLrYodZmNP2nydg7b2K6ceAbNfEh/ClwHfIdm7r9zGP2O3aFNxfnTjFylJEm3uH/78ePckiUXAqeN+Pq/Bl5FMwfghTR3/+2imVtJkqSVCs1FqF9YYsyVAFX1ySRHAY9pH48E/gJ4aZIfr6pPrXax0lqw8Sd119OB7cCx7d0QACR5/MQqkiSp8avtx3fR/IF1A7Cuqi5c9BW39ibgFcAvJflX4ATggqr6wrgLlSTNlE/SzD97aVXt9p1NVfVt4Lz2QZLjaLLtecCvzw9bnVKlteEcf1J37aIJme/dlZdkb+B3V+NkLkUvSdqdJHsl+WPgEcB5VfWv7cWpNwIPTnLCIq87ePB5VV1P83aqJ9HMD7iOBYt6mEuSpBX4a5o+x9BVdpPcdeDzA4cM+XD78c4D274BbBg2D6BZpWngHX9Sd72NJrDeneRcmj+KfgH47pKvWrkH0yz68QbgxFU6hyRpejwwydPazw8A7gv8DHAvmmkoBt9G9X+BhwN/n+TvaRb0+E479jjgcm6bLW8AfppmWos5mjmWBplLkqRlqaq3Jfkr4DeSPBD4J+ArwPcDDwM20qxCD/CeJDcA7wc+S7MAyIk0N1/8zcBhLwV+CnhNkn+juUHjoqr6MmaVpoCNP6m7XkFzt9+vAn8CfBH4O5oVgK+aYF2SpNnw8+3jZpq7HT4HXAK8uar+eXBgVc0leTjwfOBngeOB/21f8wHg7CHH/yeale3vDJxdVd9apa9DkjRDquoZSd4HnAz8HnB7mr+lPtw+n/fnNJn1azRZ9FXgP4HfrKr3DYx7JU2z8ASahRdvBzwK+PLqfiXSeKTKt6tLkiRJkiRJfeMcf5IkSZIkSVIP2fiTJEmSJEmSesjGnyRJkiRJktRDNv4kSZIkSZKkHrLxJ0mSJEmSJPWQjT9JkiRJkiSph2z8adUlOTFJJTlmkuecRB2TPO+eSHJgkr9Ocl1b+8VjPv45SWqcx5SkPWFWmVVDjm9WSeoMc8qcGnJ8c0ojsfGnkSU5pv2FNf/YlWRHkiuTvCHJ45NkzOfckuRnxnnM1dB+b7YkudOkaxmTM4CnAmcBTwdetrsXJNk7yTOSXJDk+iTfSfLVJO9L8ptJ9lvtoiXJrFqcWWVWSZo8c2px5pQ5pdWRKhvEGk17deV9wJuB84AABwD3BX4GuCdwIfCUqrph4HV7AfsA36mqm5d5zgLeUFUnLvN1tzlnkhOBvwIeVVUXL+d4I5xvC/AS4PCq2r67WrouyXXAZVX10yOOPwj4R+ChwH8A24AvAHcCfhz4KeDcqvrZdvw5wC9X1Vj/p0aSzKolz7cFs8qskjRR5tSS59uCOWVOaez2nnQBmkofrqq/HdyQ5HnA6cDzaELs2Pl9VbUL2LUWhSU5oKq+vpbn3J0u1bIMdwO+NsrA9ork22gC6jlV9eoFQ85Mch/gKeMtUZKWZFYtQ5dqWQazStI0M6eWoUu1LIM5pU7wrb4ai6raVVXPBz4APD7JI+b3ZfjcEHdsb+P+eJKdSW5I8t9JXtHuPyy3zFfwy4O3ww8co9LMa/CYJB9I8g2aqyK7mwNi7/bcn0ny7SRXJPm5hYPmjz9k+62O3Y55Sbv70wO1blmqljRzPrw2yWfbW7g/2z6/yyLne3SS307yqbbuTyT55SFf31BJ9k/yhwOv/2KaOSfuNTBmS/s9Drf+vp+4xKF/iuYK1N8NCSgAquqTVfXy3dR3ZJI/S/LRJF9vfy4uT/LMIWPvnOSV7dfyrTS3v1+e5HcWjPulJB9sf76+meSaJG9sr6YNjrtPkr9J8oX232J7klck2X/BuEOTvH7gZ+
fLSf5tOf8OkibHrDKrMKskdZg5ZU5hTmkVeMefxu0vgUcAT6AJrMW8FngG8NfAmTQ/i/cBHt3uv55mHoS/Ad4PbF3kOJuAJwN/AbxhxBpPA/YH/qx9/ivAm5PcsarOGfEYg14HrAOeCPwW8JV2+xWLvSDJeuDfgI3A64EPAw8AngU8OsmDq+rrC172cmDf9nzfbseek+R/qupflyowyT7A+cDDaa4knUHz/X4W8Ngkm6rqc8C5wP9w2+/7vy1x+BPaj4v9G43qGJqw+yfg0zT/Rk8B/iLJQVX1hwNj39qOPYvm+7wvcL/2GPP/o/N0mp+J9wO/D9wEHAocBxxM8zNGkgcBFwE30HxvPw/8H+A5wMOTPLKqvptkb+AC4BCan51PAOuB+wM/xug/f5Imz6wyq1bqGMwqSavPnDKnVuoYzCktVFU+fIz0oPkFUMBvLzHmge2YfxjYdmK77ZiBbV8DzhvhnAWcs8S+An5iyL5h55zf9hlg/cD29e22rwH77u7cixx7S7vtsBHHv6zd9uwFY3+93f4HQ17/n8DtB7YfQhNWbx7h+3hSe4zTF2x/Qrv9b0b9vg859uXt+Dsv42fpnObXz6227T9k3O2Ai4E5YJ+Bf68C/mw35zgXuBHYezfjPgJcDRywYPsT2/Oc2D6/f/v8BXvy35EPHz5W92FWLXlss8qs8uHDx4Qf5tSSxzanzCkfq/Dwrb4atxvbj+t2M24OODrJD+7h+T5SVRcu8zV/XlVz80/az88CNtAE8Vp4Is3VkYVXdF7Xbn/ikNf8WVV9Z/5JVX2e5grJfUY8383A4BUequpdwH8BxydZ6e+D+X/rG5cctRtV9c35z9O8beEuwJ2B97TnOLLdfRNNOD8kyWFLHHIO2A94QjJ8ZbQkP0QTPm8C7tC+VeDAJAfSXF39JvDYgeMBPCrJwSv4EiV1h1k1GrNqAbNK0hoxp0ZjTi1gTmkYG38at1F/YT2XJhT+u51T4OwkK/lF+YnlFgh8bMi2q9qPR6zgeCtxOPDxqvrfwY3t808sUsc1Q7Z9FbjLkO3DznddVe0Ysu+jNCuJHTjCcYaZ/7c+YIWvByDJ9yX54yTX0gTRV2gCe37Z+w0AbVA/F/hBmvk/Pprk1Ukes+CQL6e56vgO4Pok/5DkmUkG67xf+/Gl7bkGH1+muTX+ru15P9PW8ljgC+38F6cn+ZE9+bolTYRZNRqzagGzStIaMadGY04tYE5pGBt/Grf7tx8/vtSgqnoncBjNnBMXAY+h+WVycZLbL+N8O5df4h6b1NyYi61iNenl269sPz5gD4/zJpoVzM4DfhF4PPCTwCvb/d/7fVVVZ9H8/JxEM5fHCcCFSd4yMOaTwFE0t96/AbgXzbwlVye5dzts/nt3RnuuYY8XDBzzxTRXA58LfAp4JvDBJKft4dcuaW2ZVavHrGqZVZL2gDm1esypljk1O2z8adx+tf34rt0NrKqvVdXfVtVJNFdjTqeZ0PP4VawPbrkiMeio9uPgFaCv0dwWvdCwK0e1zBquAe7bTm76Pe3zH2D4lag9cQ1wjyR3GrLvKJorTF8Zsm8U/9B+vM1KUaNq6/opmnkxTqmqN1XV+e1bDr4z7DVV9YWqOruqng58P/Bm4KmDV4uq6ttVdV5VPb+qNtEE1j1owhDgk+3HXVV14SKPyxec95qqenVV/Wx7rH8BXuCt6tJUMatGY1YNMKskrSFzajTm1ABzSoux8aexSLJXkj+mWX3qvFpiRaR27J0Gt1XV/ESrcOtg+AbDg2JPPCvNClDz9awHTqFZgeiSgXGfAB6WZL+BsRtoVqxa6Bvtx1FrfQdwELf9xX5Su/3tIx5nVO+g+e/9dwc3JjmW5qrSP1bVzSs89jaaX9Q/n+TZwwYk2Zjk95Y4xvyVt1tdaUtydxZ8j5LsN/hvAlBVu7hlxa87t+OG3Wb/4cExND9zVwKnJLnN/3wk2TvJ/PHWp1nJa/C83+KWtzlsWPSrk9QJZpVZhVllVkkdZk6ZU5hT5tQqmNTttZpuD0zytPbzA4D7Aj9Dc9vve4Bf2M3rD6B5P/8/0vyS+DLNfAnPAnbQ/NKbdynwE0leCFxLk2dvYc98BfiPJH/VPv8V4J7AM6tq8Db31wB/C1yU5G+AO9GEyGeAuy045qXtx9OSvBH4FnBlVV3JcKfTLKv+2iQPpPk+PIDm6t7H2/3jdA7wy8AL00ze+i80y94/G/gS8KKVHriqKskJNP9ur02z5Ps/Al+k+Z49AvhpbrmKNewYX0/yHuBpSW4CPkTz8/RrNMvQD8658QPAJUneThMwO2iuOD6rHfv+dtx7ktzQPv9sW8uJtCtuDdQ+/9aIK5K8nmZ+jv1ovj9PAn6P5vv3KGBrkn+g+Tf6BvAgmhD9j6pa8q0YktacWWVWfY9ZZVZJHWROmVPfY06ZU6uqOrC08EofNJNEnkPzA/NRFizj7WPs3+9juGW596K5ojDXfu/fADx+kdedyMDS68DtaVZC+iDNRKrfBrYDrwfus+C196EJvhvnzzuwb6ll6W91zgXbfoJm4tFr23P/N/ALixznd2hC6ds0VyGeMezY7dgX0Nz+/d12/5bFamm3HwT8GfC59jWfA14LHLi7r2Vg38XA9hH//fZvv+/X0Nzq/WWaX9b3GjJ25KXnB16zD03IXkjzPwLfbf99L6IJw30Hxp4z+G/ZbjsQOBu4jibk/5vmfwoW/vzchWaOiv+iuaJ4E/A/wKuAuw8c7yTgApqw/A7wBZq5Lh41pPZ70axCtr0d+1Xg8vb7dWg75vB2zMdofh6/2X5+KrB+0v99+ujuA7Nqrb/fx2BWmVWLH9+s8uFjwQNzaq2/38dgTplTix/fnPIx9kfab/5USnIWcG1Vvbx9fnBVfXnCZUmS9D1mlSSpy8wpSeq3NZ/jr31f+uuSXJFkV5KLFxl3VJL3JtmZ5LokpybZa2D/ATS3Qr9ifpsBJUkaB7NKktRl5pQkaVSTmOPvaOA4mvfv7zNsQDvZ54XAVTSrEd2bZmno2wEvbocdAVwP/EmSh9K83/z/qartq1m8JGkmmFWSpC4zpyRJI5nEqr7bqurQqnoKzTwGw5wC7As8qaouqKqzaOYPeF6Sde2YvYEfBN5ZVQ8E3kkzJ4IkSXvKrJIkdZk5JUkayZo3/mq05a2PBc6vqhsHtr2FJrge2T7/HDBXVecP7H/Q2AqVJM0ss0qS1GXmlCRpVJN4q+8ojqRZteZ7quraJDvbfduq6kvtnBY/UlUfAn6SZsWaoZKcDJwMsP/++z/oyCOPXL3qJUkju/zyy79SVQdNuo4VGGtWmVOS1E3m1C3MKknqpqWyqquNvw00S0ovtKPdN+8U4Owk+9Msgf6MxQ5YVVuBrQCbNm2qyy67bGzFSpJWLslnJl3DCo01q8wpSeomc+oWZpUkddNSWdXVxt9Iquoq4EdHHZ9kM7B548aNq1eUJEkDlpNV5pQkaa35N5Uk9dskFvcYxQ5g/ZDtG9p9K1JV26rq5PXrhx1akqRlGXtWmVOSpDHybypJUmcbf1fTzDvxPUkOBfZr9
61Iks1Jts7Nze1heZIkjT+rzClJ0hj5N5UkqbONv3cDj0tywMC2pwI3AZes9KBenZIkjdHYs8qckiSNkX9TSZLWfo6/JPsBx7VPDwHWJTmhfX5eVe0EzgKeA5yb5DTgCGALcOaC5eiXe27no5Ak7dakssqckiSNwr+pJEmjSlWt7QmTw4BPL7L78Kra3o47CngN8DCa1ajOBrZU1a49rcEVqCSpO5JcXlWbJl3HoElnlTklSd1hTg1nVklSdyyVVWt+x18bQhlh3FXAo1e9IEmSFjCrJEldZk5JkkbV1Tn+VoUT0UqSusyckiR1nVklSdNlphp/TkQrSeoyc0qS1HVmlSRNl5lq/EmSJEmSJEmzYqYaf96WLknqMnNKktR1ZpUkTZeZavx5W7okqcvMKUlS15lVkjRdZqrxJ0mSJEmSJM0KG3+SJEmSJElSD81U48/5KCRJXWZOSZK6zqySpOkyU40/56OQJHWZOSVJ6jqzSpKmy0w1/iRJkiRJkqRZYeNPkiRJkiRJ6iEbf5IkSZIkSVIPzVTjz4loJUldZk5JkrrOrJKk6TJTjT8nopUkdZk5JUnqOrNKkqbLTDX+JEmSJEmSpFlh40+SJEmSJEnqIRt/kiRJkiRJUg/Z+JMkSZIkSZJ6yMafJEmSJEmS1EN7T7qAaXTY775r0iWsiu1/9IRJlyD1lr83JEmSJElrbabu+EuyOcnWubm5SZciSdJtmFOSpK4zqyRpusxU46+qtlXVyevXr590KZIk3YY5JUnqOrNKkqbLTDX+JEmSJEmSpFlh40+SJEmSJEnqIRt/kiRJkiRJUg/Z+JMkSZIkSZJ6aO9JF7CnkmwHdgLfaTf9QlVdNbmKJEm6hTklSeo6s0qS+mvqG3+t46pq+6SLkCRpEeaUJKnrzCpJ6qGJvNU3ycYkr0tyRZJdSS5eZNxRSd6bZGeS65KcmmSvNS5XkjRjzClJUteZVZKkUUzqjr+jgeOAS4F9hg1IsgG4ELgKOB64N3AGTbPyxQuGvyNJgH8CtlTVd1epbknSbDCnJEldZ1ZJknZrUot7bKuqQ6vqKcBHFxlzCrAv8KSquqCqzgJeCjwvybqBcY+oqh8GHg4cBfz2KtYtSZoN5pQkqevMKknSbk2k8VdVN48w7Fjg/Kq6cWDbW2iC65EDx/pc+/EbwF8CPzrGUiVJM8ickiR1nVklSRrFpO74G8WRwNWDG6rqWprVpo4ESLL//JWqJHsDTwauGHawJCcnuSzJZddff/2qFi5JmgnmlCSp68wqSZpxXW78bQBuGLJ9R7sP4K7AvyS5AvgIsAt42bCDVdXWqtpUVZsOOuigVShXkjRjzClJUteZVZI04ya1uMdYVNU1wA+POj7JZmDzxo0bV60mSZLmmVOSpK4zqySp37p8x98OYP2Q7RvafctWVduq6uT164cdVpKkZTGnJEldZ1ZJ0ozrcuPvatp5J+YlORTYjwXzVIwqyeYkW+fm5sZQniRpxplTkqSuM6skacZ1ufH3buBxSQ4Y2PZU4CbgkpUc0KtTkqQxMqckSV1nVknSjJvIHH9J9gOOa58eAqxLckL7/Lyq2gmcBTwHODfJacARwBbgzAXL0S/nvM5HIUnaLXNKktR1ZpUkaRSTWtzjYOCtC7bNPz8c2F5VO5I8BngNsI1mNapX0gTVilTVNmDbpk2bTlrpMSRJM8GckiR1nVklSdqtiTT+qmo7kBHGXQU8etULkiRpgDklSeo6s0qSNIouz/E3dk5EK0nqMnNKktR1ZpUkTZeZavw5Ea0kqcvMKUlS140tq5J+PiSpY2aq8SdJkiRJkiTNiplq/HlbuiSpy8wpSVLXmVWSNF1mqvHnW6gkSV1mTkmSus6skqTpMlONP0mSJEmSJGlW2PiTJEmSJEmSemimGn/ORyFJ6jJzSpLUdWaVJE2XmWr8OR+FJKnLzClJUteZVZI0XWaq8SdJkiRJkiTNCht/kiRJkiRJUg/Z+JMkSZIkSZJ6aKYaf05EK0nqMnNKktR1ZpUkTZeZavw5Ea0kqcvMKUlS15lVkjRdZqrxJ0mSJEmSJM0KG3+SJEmSJElSD9n4kyRJkiRJknpo70kXIEmSxivJpEtYFVU16RIkSZKkqeIdf5IkSZIkSVIP2fiTJEmSJEmSemimGn9JNifZOjc3N+lSJEm6DXNKktR1ZpUkTZeZavxV1baqOnn9+vWTLkWSpNswpyRJXWdWSdJ0manGnyRJkiRJkjQrbPxJkiRJkiRJPWTjT5IkSZIkSeohG3+SJEmSJElSD/Wi8ZfktUlq0nVIkrQYs0qS1GXmlCT109Q3/pL8GPB9k65DkqTFmFWSpC4zpySpv9a88ZdkY5LXJbkiya4kFy8y7qgk702yM8l1SU5NsteCMXcA/gj47TUoXZI0I8wqSVKXmVOSpFHtPYFzHg0cB1wK7DNsQJINwIXAVcDxwL2BM2galS8eGPr7wF9W1fVJVrNmSdJsMaskSV1mTkmSRjKJxt+2qnonQJK3AQcOGXMKsC/wpKq6EbggyTpgS5LTq+rGJPcHHsKtQ0uSpHEwqyRJXWZOSZJGsuZv9a2qm0cYdixwfhtQ895CE1yPbJ8/HDgK+HSS7QBJtic5aIzlSpJmkFklSeoyc0qSNKquLu5xJHD14IaquhbY2e6jqv68qu5RVYdV1WHttsOq6vphB0xycpLLklx2/fVDh0iStBxjzSpzSpI0Zv5NJUkarfGX5OAkhw88T/tL/1VJNq9CXRuAG4Zs39HuW7aq2lpVm6pq00EHeQFLkvpm2rPKnJKkfpv2nAKzSpKm0ah3/J0D/NbA81OBPwMeD7w9yYnjLWv5qmq3M9Em2Zxk69zc3FqUJElaW+cw5VllTklSr53DlOcUmFWSNG1Gbfw9ELgIIMntaCaKfVFVHQm8DHjumOvaAawfsn1Du29FqmpbVZ28fv2wQ0uSptzUZ5U5JUm9NvU5BWaVJE2bURt/64Gvtp8/CLgz8Mb2+UXAxjHXdTXtvBPzkhwK7MeCeSqWw6tTktRrU59V5pQk9drU51R7DLNKkqbIqI2/z9Gs9gTwBODqqvp8+3w98K0x1/Vu4HFJDhjY9lTgJuCSlR7Uq1OS1GtTn1XmlCT12tTnFJhVkjRt9h5x3OuB05P8BE1I/d7AvocCHxv1hEn2A45rnx4CrEtyQvv8vKraCZwFPAc4N8lpwBHAFuDMBcvRL0s7ae7mjRvHfTFNktQBU59V5pQk9drU51R7brNKkqbISI2/qvrDJJ8HfgT4TZrQmndn4OxlnPNg4K0Lts0/PxzYXlU7kjwGeA2wjWY1qlfSBNWKVdU2YNumTZtO2pPjSJK6pw9ZZU5JUn/1IafArJKkaTNS4y/JPYE3V9VfD9n9m8DdRz1hVW0HdrtaVFVdBTx61ONKkmabWSVJ6jJzSpI0CaPO8fdp4AGL7Lt/u7/znIhWknpt6rPKnJKkXpv6nAKzSpKmzaiNv6WuJt0R+PYYall1TkQrSb029VllTklSr019ToFZJUnTZtG3+ia5P/DDA5uOS3LkgmF3BH4W+MT4
S5MkaWlmlSSpy8wpSdKkLTXH3xOBl7SfF/D7i4z7NPBr4yxqtbgClST1Tq+yypySpN7pVU6BWSVJ02apt/q+HDgAWEdzW/qj2+eDjztU1b2r6sLVLnQcvC1dknqnV1llTklS7/Qqp8CskqRps+gdf1X1XeC77dNR5wKUJGnNmFWSpC4zpyRJk7bUW31vI8kPAN9PMw/FrVTVeeMqSpKklTKrJEldZk5JktbSSI2/JEcBbwGOZvhqVAXsNca6VoXzUUhSf/Uhq8wpSeqvPuQUmFWSNG1Gvd38dcAdgCcB9wUOX/A4YlWqGzPno5CkXpv6rDKnJKnXpj6nwKySpGkz6lt9HwD8XFX902oWI0nSHjCrJEldZk5JktbcqHf8fYohc1BIktQhZpUkqcvMKUnSmhu18fd84EVJpuL2c0nSTDKrJEldZk5JktbcqG/1/UPgEODqJNuBGxYOqKoHj6+s1eFEtJLUa1OfVeaUJPXa1OcUmFWSNG1Gbfxd2T6mWlVtA7Zt2rTppEnXIkkau6nPKnNKknpt6nMKzCpJmjYjNf6q6ldWuxBJkvaEWSVJ6jJzSpI0CaPO8SdJkiRJkiRpiox0x1+Sv9/dmKr62T0vR5KklTGrJEldZk5JkiZh1Dn+DhqybQNwJPBV4ONjq0iSpJUxqyRJXWZOSZLW3Khz/D1q2PYkhwJvB145zqIkSVous0qS1GXmlCRpEvZojr+q+izNsvSnj6ccSZLGy6ySJHWZOSVJWk3jWNxjF/D9YzjOqkuyOcnWubm5SZciSVpbU5FV5pQkzaypyCkwqyRp2oy6uMdRQzbfHrgf8AfAh8ZZ1Gqpqm3Atk2bNp006VokSePVh6wypySpv/qQU2BWSdK0GXVxjyuBGrI9wGXAM8dWkSRJK2NWSZK6zJySJK25URt/wyai/Rbwuar6/BjrkSRppcwqSVKXmVOSpDU36qq+l6x2IZIk7QmzSpLUZeaUJGkSRr3jjyR7A08GHgHcGfga8H7g3Kr639UpT5Kk0ZlVkqQuM6ckSWtt1MU9DgbeA9wf2A58CXgY8OvAR5I8tqquX60il6jrEuBONPNifAJ4RlXduNZ1SJImz6ySJHVZV3Oqrc2skqSeut2I484E7gI8tKqOqKqHVdURwEPa7WeuVoG78dNV9X+q6v7AtcDvTKgOSdLkmVWSpC7rak6BWSVJvTVq4+844IVV9cHBjVX1IeD3gCeMesIkG5O8LskVSXYluXiRcUcleW+SnUmuS3Jqkr0WnH+uHXs7YH+Gr5IlSZoNZpUkqcvGllNgVkmSRjPqHH93AL6+yL6vA7dfxjmPpgm9S4F9hg1IsgG4ELgKOB64N3AGTaPyxQvGngf8CPBR4PnLqEOS1C9mlSSpy8aZU2BWSZJGMOodf5cCL0yy/+DG9vkL2/2j2lZVh1bVU2hCZZhTgH2BJ1XVBVV1FvBS4HlJ1g0OrKrjgLsBHwSevYw6JEn9YlZJkrpsnDkFZpUkaQSjNv6eT3NF6bNJ3pLkT5K8GfgscBTLuCJUVTePMOxY4PwFE8q+hSa0HjnkmLuANwC/NGodkqTeMaskSV02tpwCs0qSNJqRGn9V9V/AfYCtwEHATwIHA2cB96mqj4y5riOBqxfUcC2ws91Hkg1J7jow5MnAlYsdMMnJSS5Lctn1109ksSxJ0iqa9qwypySp3yaQU2BWSdLMG3WOP6rqK8DvrmItgzYANwzZvqPdNz/m75LcgWbZ+Y8Bv7nYAatqK03IsmnTJierlaQemuasMqckqf/WOKfArJKkmbdo4y/JHYGTgA9V1dD5JpI8lGYC2NdV1XdWp8Thquqa9twjS7IZ2Lxx48bVKUqStKb6llXmlCT1S9dzCswqSeq7pd7q+2zg/7Lg1vAFPkaz9Pwp4yyK5grU+iHbN7T7VqSqtlXVyevXDzu0JGkK9SqrzClJ6p1J5hSYVZI085Zq/P0c8OqqumGxAVU1B7wG+MUx13U17ZwT85IcCuzH0qG5pCSbk2ydm5vbw/IkSR3Rq6wypySpdyaZU2BWSdLMW6rxdzTw7yMc49J27Di9G3hckgMGtj0VuAm4ZKUH9eqUJPVOr7LKnJKk3plkToFZJUkzb6nFPVZlstYk+wHHtU8PAdYlOaF9fl5V7aRZ2eo5wLlJTgOOALYAZy5Yin6553Y+Cknql15llTklSb2zagtgmFWSpFEsdcffJ4CHj3CMh7djR3Uw8Nb28VDgqIHnBwNU1Q7gMcBewDbgpcArgZcs4zy34dUpSeqdXmWVOSVJvbNaOQVmlSRpBEvd8fcm4EVJ3lZVHxs2IMn9gOcCLxv1hFW1nWaZ+N2Nuwp49KjHlSTNJLNKktRlq5JTYFZJkkazVOPvT4HjgQ8m+XPgfOBamtvV7wk8DngW8J/Aq1e5zrHwtnRJ6p1eZZU5JUm906ucArNKkqbNom/1rarvAD8JbKUJo/fQrPz0ceBCmqXptwKPq6rvrn6pe87b0iWpX/qWVeaUJPVL33IKzCpJmjZL3fFHVX0LeH6SFwMPopk0FuDzwGXtfkmSJsaskiR1mTklSZqkJRt/86rqJuADq1zLqvO2dEnqrz5klTklSf3Vh5wCs0qSps1Sq/r2jrelS5K6zJySJHWdWSVJ02WmGn+SJEmSJEnSrLDxJ0mSJEmSJPXQTDX+kmxOsnVubm7SpUiSdBvmlCSp68wqSZouM9X4cz4KSVKXmVOSpK4zqyRpuoy0qm+S1y+x+2bgRuC/gHOr6htjqEuSpGUxqyRJXWZOSZImYaTGH/BDwKHAwcCXgOuBg4C7Al8G5oDfAF6W5DFV9YlVqFWSpKWYVZKkLjOnJElrbtS3+v4+cAPwkKq6e1Xdv6ruDjyUJqB+B7gv8HXgFatRqCRJu2FWSZK6zJySJK25URt/pwMvqaoPDW6sqg8CW4DTqurTwB8BPz7WCsfIiWglqdemPqvMKUnqtanPKTCrJGnajNr42wjctMi+ncBh7eefAe6whzWtGieilaRem/qsMqckqdemPqfArJKkaTNq4+8/gZckudvgxiR3B14CXN5uuhdw3fjKkyRpZGaVJKnLzClJ0pobdXGPU4Dzge1JLueWiWgfBHwNeFw77h7AX4y7SEmSRmBWSZK6zJySJK25kRp/VXVFkiOAZwCbgLsBnwDeCPxVVd3Ujvuj1SpUkqSlmFWSpC4zpyRJkzDqHX+0QfTaVaxFkqQ9YlZJkrrMnJIkrbWRG38ASR4CPAK4M83t6O9vV6GSJKkTzCpJUpeZU5KktTRS4y/J/sBbgccD/wt8FbgLsFeSfwaeUlU7V61KSZJ2w6ySJHWZOSVJmoRRV/U9HXgY8FTgjlV1d+COwM+1209bnfLGK8nmJFvn5uYmXYokafymPqvMKUnqtanPKTCrJGnajNr4ezLwwqp6a1XdDFBVN1fVW4HfBZ6yWgWOU1Vtq6qT169fP+lSJEnjN/VZZU5JUq9NfU6BWSVJ02bUxt964LOL7PsssG485UiStGJmlSSpy8wpSdKaG7Xx9xHgWUkyuLF9/qx2vyRJk2RWSZK6zJy
SJK25UVf1fRHwbuDqJG8HvgQcDDwROJxmglpJkibJrJIkdZk5JUlacyM1/qrqoiQPBP5fmrkn7g58AfgP4ElVddXqlShJ0u6ZVZKkLjOnJEmTMOodf1TVR2lWnLqVJHdJ8uNV9S9jrWwESQ4FzgHuAdwMvItmwtxa61okSZPXtawypyRJg7qWU+25zSpJ6rFR5/hbyjHA+8ZwnJX4X5pQuh/wAOAhwJMmVIskqbuOYTJZZU5JkkZxDP5NJUlaBeNo/C1bko1JXpfkiiS7kly8yLijkrw3yc4k1yU5Ncle8/ur6gtVdVn7+XeAK4BD1+SLkCT1ljklSeo6s0qSNIqR3+o7ZkcDxwGXAvsMG5BkA3AhcBVwPHBv4AyaZuWLh4y/C/AzwGNXpWJJ0iwxpyRJXWdWSZJ2a1KNv21V9U6AJG8DDhwy5hRgX5qJbm8ELkiyDtiS5PR2G+0x7gC8DXhVVX1s9cuXJPWcOSVJ6jqzSpK0WxN5q29V3TzCsGOB8wfDCHgLTXA9cn5De5v6G4H/rKozxlqoJGkmmVOSpK4zqyRJo1j0jr8k1wOjrOR0h/GVcytHAhcNbqiqa5PsbPdtaze/Dvg68PylDpbkZOBkgHve855jL1aStPYmnFXmlCRpSf5NJUljlEy6gtWzigupL/VW39cyWkitlg3ADUO272j3keThwK8CVwL/meaH4PVV9acLX1RVW4GtAJs2bXJpeknqh0lmlTklSdod/6aSJE3Uoo2/qtqyhnWsSFX9KzByyzfJZmDzxo0bV68oSdKa6XpWmVOSNNu6nlNgVklS301kjr8R7QDWD9m+od23bFW1rapOXr9+2GElSVoWc0qS1HVmlSTNuC43/q6mmXfie5IcCuzX7lu2JJuTbJ2bmxtDeZKkGWdOSZK6zqySpBnX5cbfu4HHJTlgYNtTgZuAS1ZyQK9OSZLGyJySJHWdWSVJM26pxT1WTZL9gOPap4cA65Kc0D4/r6p2AmcBzwHOTXIacASwBThzwXL0yzmv81FIknbLnJIkdZ1ZJUkaxUQaf8DBwFsXbJt/fjiwvap2JHkM8BqaZeZvAF5JE1QrUlXbgG2bNm06aaXHkCTNBHNKktR1ZpUkabcm0virqu2MsHJUVV0FPHrVC5IkaYA5JUnqOrNKkjSKLs/xN3ZORCtJ6jJzSpLUdWaVJE2XmWr8ORGtJKnLzClJUteZVZI0XWaq8SdJkiRJkiTNiplq/HlbuiSpy8wpSVLXmVWSNF1mqvHnbemSpC4zpyRJXWdWSdJ0manGnyRJkiRJkjQrbPxJkiRJkiRJPTRTjT/no5AkdZk5JUnqOrNKkqbLTDX+nI9CktRl5pSkSUrSy4fGy6ySpOkyU40/SZIkSZIkaVbY+JMkSZIkSZJ6yMafJEmSJEmS1EMz1fhzIlpJUpeZU5KkrjOrJGm6zFTjz4loJUldZk5JkrrOrJKk6TJTjT9JkiRJkiRpVtj4kyRJkiRJknrIxp8kSZIkSZLUQzb+JEmSJEmSpB6y8SdJkiRJkiT1kI0/SZIkSZIkqYdmqvGXZHOSrXNzc5MuRZKk2zCnJEldZ1ZJ0nSZqcZfVW2rqpPXr18/6VIkSboNc0qS1HVmlSRNl5lq/EmSJEmSJEmzwsafJEmSJEmS1EM2/iRJkiRJkqQesvEnSZIkSZIk9dDUN/6S/HmSzyepSdciSdJC5pQkqevMKknqr6lv/AFvBh446SIkSVqEOSVJ6jqzSpJ6aiKNvyQbk7wuyRVJdiW5eJFxRyV5b5KdSa5LcmqSvQbHVNW/VNWX1qRwSdJMMKckSV1nVkmSRrH3hM57NHAccCmwz7ABSTYAFwJXAccD9wbOoGlWvnhtypQkzShzSpLUdWaVJGm3JtX421ZV7wRI8jbgwCFjTgH2BZ5UVTcCFyRZB2xJcnq7TZKk1WBOSZK6zqySJO3WRN7qW1U3jzDsWOD8BWH0FprgeuSqFCZJEuaUJKn7zCpJ0ii6vLjHkcDVgxuq6lpgZ7tvWZKcnOSyJJddf/31YypRkjTDzClJUteZVZI047rc+NsA3DBk+452HwBJzk7yufbzzyU5e9jBqmprVW2qqk0HHXTQatQrSZot5pQkqevMKkmacZOa429squqZo45NshnYvHHjxlWsSJKkW5hTkqSuM6skqb+6fMffDmD9kO0b2n3LVlXbqurk9euHHVaSpGUxpyRJXWdWSdKM63Lj72oWzDuR5FBgPxbMUzGqJJuTbJ2bmxtDeZKkGWdOSZK6zqySpBnX5cbfu4HHJTlgYNtTgZuAS1ZyQK9OSZLGyJySJHWdWSVJM24ic/wl2Q84rn16CLAuyQnt8/OqaidwFvAc4NwkpwFHAFuAMxcsR7+c8zofhSRpt8wpSVLXmVWSpFFManGPg4G3Ltg2//xwYHtV7UjyGOA1wDaa1aheSRNUK1JV24BtmzZtOmmlx5AkzQRzSpLUdWaVJGm3JtL4q6rtQEYYdxXw6FUvSJKkAeaUJKnrzCpJ0ii6PMff2DkRrSSpy8wpSVLXmVWSNF1mqvHnRLSSpC4zpyRJXWdWSdJ0manGnyRJkiRJkjQrZqrx523pkqQuM6ckSV1nVknSdJmpxp+3pUuSusyckiR1nVklSdNlphp/kiRJkiRJ0qyw8SdJkiRJkiT10Ew1/pyPQpLUZeaUJKnrzCpJmi4z1fhzPgpJUpeZU5KkrjOrJGm6zFTjT5IkSZIkSZoVNv4kSZIkSZKkHrLxJ0mSJEmSJPXQTDX+nIhWktRl5pQkqevMKkmaLjPV+HMiWklSl5lTkqSuM6skabrMVONPkiRJkiRJmhU2/iRJkiRJkqQesvEnSZIkSZIk9ZCNP0mSJEmSJKmHbPxJkiRJkiRJPTRTjT+XnpckdZk5JUnqOrNKkqbLTDX+XHpektRl5pQkqevMKkmaLjPV+JMkSZIkSZJmhY0/SZIkSZIkqYds/EmSJEmSJEk9ZONPkiRJkiRJ6iEbf5IkSZIkSVIPTXXjL8kPJvlwkk8m+cckB0y6JkmSBplVkqQuM6ckqd+muvEHnAW8uKruA1wNvGDC9UiStJBZJUnqMnNKknpszRt/STYmeV2SK5LsSnLxIuOOSvLeJDuTXJfk1CR7Dey/K3B4VZ3XbvpL4Mmr/xVIkvrOrJIkdZk5JUka1d4TOOfRwHHApcA+wwYk2QBcCFwFHA/cGziDplH54nbY9wOfG3jZtcChq1OyJGnGmFWSpC4zpyRJI5lE429bVb0TIMnbgAOHjDkF2Bd4UlXdCFyQZB2wJcnp7basWcWSpFljVkmSusyckiSNZM3f6ltVN48w7Fjg/DaM5r2FJrge2T7/HM0Vqnn35NZXqyRJWhGzSpLUZeaUJGlUk7jjbxRHAhcNbqiqa5PsbPdtq6ovJtme5Lh2TopfBc5d7IBJTgZObp9+I8nHV6n2cTsQ+MpanCinrcVZJK2Bafu9ca+xHGXtjTWrzKndS7wxReqRafrdYU61zKoRmFVSn0zT745Fs6qrjb8NwA1Dtu9o9817FvCGJH8CfBz4xcUOWFVbga1jrH
FNJLmsqjZNug5J08PfG2tmrFllTkmaJf7uWBP+TdXy503SSvTld0dXG38jqaorgAdMug5JkhZjVkmSusyckqR+W/M5/ka0A1g/ZPuGdp8kSZNmVkmSusyckiR1tvF3Nc28E9+T5FBgv3bfLJm6W+klTZy/N9aGWdXw503SSvi7Y/WZU7fw503SSvTid0dXG3/vBh6X5ICBbU8FbgIumUxJk9HOoyFJI/P3xpoxq/DnTdLK+LtjTZhTLX/eJK1EX353rPkcf0n2A45rnx4CrEtyQvv8vKraCZwFPAc4N8lpwBHAFuDMBcvRS5I0dmaVJKnLzClJ0qhSVWt7wuQw4NOL7D68qra3444CXgM8jGY1qrOBLVW1a/WrlCTNMrNKktRl5pQkaVRr3vjT7rUB/WpuHdAvNaAlLSbJRuB3aH5vHA28v6qOmWhR6jWzStJymVVaS+aUpOXqa06t+Vt9tbQkG4ALgauA44F7A2fQzMf44gmWJqnbjqZ5y8+lwD4TrkU9Z1ZJWiGzSmvCnJK0Qr3MKe/465gkvwe8ALjX/NwbSV5AMx/H3ZyPQ9IwSW5XVTe3n78NOLAPV6fUTWaVpJUwq7RWzClJK9HXnOrqqr6z7Fjg/AVh9BZgX+CRkylJUtfNB5S0RswqSctmVmkNmVOSlq2vOWXjr3uOBK4e3FBV1wI7232SJE2aWSVJ6jJzSpJaNv66ZwPN5LML7Wj3SZI0aWaVJKnLzClJatn4kyRJkiRJknrIxl/37ADWD9m+od0nSdKkmVWSpC4zpySpZeOve65mwbwTSQ4F9mPBPBWSJE2IWSVJ6jJzSpJaNv66593A45IcMLDtqcBNwCWTKUmSpFsxqyRJXWZOSVJr70kXoNs4C3gOcG6S04AjgC3AmQuWo5ek70myH3Bc+/QQYF2SE9rn51XVzslUpp4yqyQtm1mlNWROSVq2vuZUqmrSNWiBJEcBrwEeRrMa1dnAlqraNcm6JHVXksOATy+y+/Cq2r521WgWmFWSlsus0loypyQtV19zysafJEmSJEmS1EPO8SdJkiRJkiT1kI0/SZIkSZIkqYds/EmSJEmSJEk9ZONPkiRJkiRJ6iEbf5IkSZIkSVIP2fiTJEmSJEmSesjGnzQhSZ6c5KIkNyT5dpJPJDkzyT2SHJakkvzUpOuUJM0us0qS1GXmlLR7Nv6kCUhyBvD3wDXA04HHAq8EHgO8doKlSZIEmFWSpG4zp6TR7D3pAqRZk2Qz8DzgV6vq9QO7LkmylSawJEmaGLNKktRl5pQ0Ou/4k9bebwEfXhBQAFTVrqp697AXJfmlJB9I8rUkO5K8L8mmBWOOTvLP7ZhvJvlYkl8f2P+IJO9PcmP7+K8kTxn7VyhJmnZmlSSpy8wpaUTe8SetoST7AD8KnLGClx8G/DXwKeD2wM8D709ydFVd047ZBnwMeBrwbeC+wLr23OuAfwLeCZwKBPgh4E4r+2okSX1kVkmSusyckpbHxp+0tu4C3AG4drkvrKpT5z9PcjvgAuDBNIF0apIDgcOB46vqv9uh7x04xA8A64HfqKqvt9ves+yvQJLUd2aVJKnLzClpGXyrrzQZtdwXJLlfkrcn+RKwC/guzdWnH2iHfA34LHBWkqcmOXjBIT4FfAN4U5Ljk9xpxdVLkmaBWSVJ6jJzShqBjT9pbX2V5nbxey7nRUkOoLmSdCjNJLY/BvwI8BHgjgBVdTPNJLZfBF4PfLGde+IB7f4dwE8C+9CsfnV9knclOWIMX5ckqT/MKklSl5lT0jLY+JPWUFV9F/hX4HHLfOnDgO8HnlZVb6yqD1TVZTS3mQ8e/+qqejLNHBM/QRNg72pvY6eqLq2qx7f7n0RzZetNK/+KJEl9Y1ZJkrrMnJKWx8aftPZeBWxK8ssLdyS5XZLHD3nNvu3Hbw+M/VGayWlvo6q+W1UXAWcCd2fBZLNVdVNVbaO5inXU8r8ESVLPvQqzSpLUXa/CnJJG4uIe0hqrqm1JzgT+MsnDaVaE+gZwJHAKsJ1mefpBl7Zj/iLJ6TRXqrYAn58fkOT+wB8DfwdcA2wAXgh8pKq+luQJwDOAd9BMhHsI8GvARavxdUqSppdZJUnqMnNKGp2NP2kCqur5Sf4N+A2a28L3pQmnf6QJmjsuGP+lJE9p970T+CRNoL1gYNgXgS8B/xe4B3AD8D6aoAL4H5oJcF8OHAxcT7MU/YvG/fVJkqafWSVJ6jJzShpNqpa9EI4kSZIkSZKkjnOOP0mSJEmSJKmHbPxJkiRJkiRJPWTjT5IkSZIkSeohG3+SJEmSJElSD9n4kyRJkiRJknrIxp8kSZIkSZLUQzb+JEmSJEmSpB6y8SdJkiRJkiT10P8PZdpUE8PhEBUAAAAASUVORK5CYII=\n", - "text/plain": [ - "
" - ] - }, - "metadata": { - "needs_background": "light" - }, - "output_type": "display_data" - } - ], - "source": [ - "plt.rcParams['font.size']=15\n", - "f = plt.figure(figsize=(18, 5))\n", - "\n", - "ax = f.add_subplot(1,3,1)\n", - "ax.hist(train_test_data[\"train\"]['Class'], bins = [-0.1, 0.1, 0.9, 1.1], log=True)\n", - "ax.set_ylabel(\"Log Counts\")\n", - "ax.set_xticks([0, 1])\n", - "ax.set_xticklabels([\"0\", \"1\"])\n", - "ax.set_xlabel(\"Class\")\n", - "ax.set_title(\"Train:\\nDistribution of Classes\")\n", - "ax.set_ylim([1, 1e6])\n", - "\n", - "ax = f.add_subplot(1,3,2)\n", - "ax.hist(train_test_data[\"dev\"]['Class'], bins = [-0.1, 0.1, 0.9, 1.1], log=True, color='k')\n", - "ax.set_ylabel(\"Log Counts\")\n", - "ax.set_xticks([0, 1])\n", - "ax.set_xticklabels([\"0\", \"1\"])\n", - "ax.set_xlabel(\"Class\")\n", - "ax.set_title(\"Dev:\\nDistribution of Classes\")\n", - "ax.set_ylim([1, 1e6])\n", - "\n", - "\n", - "ax = f.add_subplot(1,3,3)\n", - "ax.hist(train_test_data[\"test\"]['Class'], bins = [-0.1, 0.1, 0.9, 1.1], log=True, color='r')\n", - "ax.set_ylabel(\"Log Counts\")\n", - "ax.set_xticks([0, 1])\n", - "ax.set_xticklabels([\"0\", \"1\"])\n", - "ax.set_xlabel(\"Class\")\n", - "ax.set_title(\"Test:\\nDistribution of Classes\")\n", - "ax.set_ylim([1, 1e6])\n", - "\n", - "f.tight_layout()" - ] - }, - { - "cell_type": "markdown", - "id": "operational-binary", - "metadata": {}, - "source": [ - "As you can see, our splitter has greatly increased the representation of the minority class within the training data, but not so for the testing or dev data.\n", - "\n", - "We hope this tutorial was informative on how to introduce a **custom splitter method** to your datasets! For more customization tutorials, please check our [documentation](https://lightwood.io/tutorials.html).\n", - "\n", - "If you want to download the Jupyter-notebook version of this tutorial, check out the source github location found here: `lightwood/docssrc/source/tutorials/custom_splitter`. " - ] - } - ], - "metadata": { - "kernelspec": { - "display_name": "mdb", - "language": "python", - "name": "mdb" - }, - "language_info": { - "codemirror_mode": { - "name": "ipython", - "version": 3 - }, - "file_extension": ".py", - "mimetype": "text/x-python", - "name": "python", - "nbconvert_exporter": "python", - "pygments_lexer": "ipython3", - "version": "3.8.10" - } - }, - "nbformat": 4, - "nbformat_minor": 5 -} diff --git a/docs/_sources/tutorials/tutorial_data_analysis/tutorial_data_analysis.ipynb.txt b/docs/_sources/tutorials/tutorial_data_analysis/tutorial_data_analysis.ipynb.txt deleted file mode 100644 index 08a3e7b02..000000000 --- a/docs/_sources/tutorials/tutorial_data_analysis/tutorial_data_analysis.ipynb.txt +++ /dev/null @@ -1,774 +0,0 @@ -{ - "cells": [ - { - "cell_type": "markdown", - "metadata": {}, - "source": [ - "# Tutorial - Introduction to Lightwood's statistical analysis\n", - "\n", - "\n", - "As you might already know, Lightwood is designed to be a flexible machine learning (ML) library that is able to abstract and automate the entire ML pipeline. Crucially, it is also designed to be extended or modified very easily according to your needs, essentially offering the entire spectrum between fully automated AutoML and a lightweight wrapper for customized ML pipelines.\n", - "\n", - "As such, we can identify several different customizable \"phases\" in the process. 
The relevant phase for this tutorial is the \"statistical analysis\" that is normally ran in two different places:\n", - "\n", - "* To generate a Json AI object from some dataset and a problem definition\n", - "* To train a Lightwood predictor\n", - "\n", - "In both cases, we generate a `StatisticalAnalyzer` object to store key facts about the data we are using, and refer to them afterwards.\n", - "\n", - "## Objective\n", - "\n", - "In this tutorial, we will take a look at the automatically generated statistical analysis for a sample dataset." - ] - }, - { - "cell_type": "markdown", - "metadata": {}, - "source": [ - "## Step 1: load the dataset and define the predictive task\n", - "\n", - "The first thing we need is a dataset to analyze. Let's use Human Development Index information:" - ] - }, - { - "cell_type": "code", - "execution_count": 1, - "metadata": {}, - "outputs": [ - { - "data": { - "text/html": [ - "
\n", - "\n", - "\n", - " \n", - " \n", - " \n", - " \n", - " \n", - " \n", - " \n", - " \n", - " \n", - " \n", - " \n", - " \n", - " \n", - " \n", - " \n", - " \n", - " \n", - " \n", - " \n", - " \n", - " \n", - " \n", - " \n", - " \n", - " \n", - " \n", - " \n", - " \n", - " \n", - " \n", - " \n", - " \n", - " \n", - " \n", - " \n", - " \n", - " \n", - " \n", - " \n", - " \n", - " \n", - " \n", - " \n", - " \n", - " \n", - " \n", - " \n", - " \n", - " \n", - " \n", - " \n", - " \n", - " \n", - " \n", - " \n", - " \n", - " \n", - " \n", - " \n", - " \n", - " \n", - " \n", - " \n", - " \n", - "
PopulationArea (sq. mi.)Pop. DensityGDP ($ per capita)Literacy (%)Infant mortalityDevelopment Index
0994420112840007.7120047.593.822
1545066143094126.531100100.04.564
22678338343707261.3150040.450.252
3943910292.5340097.07.354
4343193217622019.51280098.011.953
\n", - "
" - ], - "text/plain": [ - " Population Area (sq. mi.) Pop. Density GDP ($ per capita) \\\n", - "0 9944201 1284000 7.7 1200 \n", - "1 5450661 43094 126.5 31100 \n", - "2 26783383 437072 61.3 1500 \n", - "3 9439 102 92.5 3400 \n", - "4 3431932 176220 19.5 12800 \n", - "\n", - " Literacy (%) Infant mortality Development Index \n", - "0 47.5 93.82 2 \n", - "1 100.0 4.56 4 \n", - "2 40.4 50.25 2 \n", - "3 97.0 7.35 4 \n", - "4 98.0 11.95 3 " - ] - }, - "execution_count": 1, - "metadata": {}, - "output_type": "execute_result" - } - ], - "source": [ - "import pandas as pd\n", - "\n", - "df = pd.read_csv('https://raw.githubusercontent.com/mindsdb/lightwood/stable/tests/data/hdi.csv')\n", - "df.head()" - ] - }, - { - "cell_type": "markdown", - "metadata": {}, - "source": [ - "This dataset has a handful of important factors to each country's development index, as well as the index itself (very high, high, medium or low). Each row gives information about a country's status in terms of their population size, density, and GDP per capita, among others.\n", - "\n", - "We can see there are columns with integer (e.g. `Population`), float (`Pop. Density`) or categorical (e.g. `Development Index`) data.\n", - "\n", - "The task we will consider here is to predicting the development index of each nation based on the rest of the available information.\n", - "\n", - "Lightwood provides an abstraction called `ProblemDefinition` to specify the target column of a dataset, along with other important parameters that you might want to define (for a complete list, check the documentation).\n", - "\n", - "We will create a simple one:" - ] - }, - { - "cell_type": "code", - "execution_count": 23, - "metadata": {}, - "outputs": [], - "source": [ - "from lightwood.api.high_level import ProblemDefinition\n", - "\n", - "problem_definition = ProblemDefinition.from_dict({'target': 'Development Index'})" - ] - }, - { - "cell_type": "markdown", - "metadata": {}, - "source": [ - "Let's see how this object has been populated. `ProblemDefinition` is a Python `dataclass`, so it comes with some convenient tools to achieve this:" - ] - }, - { - "cell_type": "code", - "execution_count": 24, - "metadata": {}, - "outputs": [ - { - "data": { - "text/plain": [ - "{'target': 'Development Index',\n", - " 'pct_invalid': 2,\n", - " 'unbias_target': True,\n", - " 'seconds_per_mixer': None,\n", - " 'seconds_per_encoder': None,\n", - " 'time_aim': None,\n", - " 'target_weights': None,\n", - " 'positive_domain': False,\n", - " 'timeseries_settings': TimeseriesSettings(is_timeseries=False, order_by=None, window=None, group_by=None, use_previous_target=True, nr_predictions=None, historical_columns=None, target_type='', allow_incomplete_history=False),\n", - " 'anomaly_detection': True,\n", - " 'ignore_features': [],\n", - " 'fit_on_all': True,\n", - " 'strict_mode': True,\n", - " 'seed_nr': 420}" - ] - }, - "execution_count": 24, - "metadata": {}, - "output_type": "execute_result" - } - ], - "source": [ - "from dataclasses import fields\n", - "\n", - "{field.name: getattr(problem_definition, field.name) for field in fields(ProblemDefinition)}" - ] - }, - { - "cell_type": "markdown", - "metadata": {}, - "source": [ - "Notice how, even though we only defined what the `target` was, there are a bunch of additional parameters that have been assigned a default value. That is fine for our purposes, but remember that you can set any of these according to your own predictive needs.\n", - "\n", - "We also need to infer the type of each column. 
There is a method for this, `infer_types`, that we can use:" - ] - }, - { - "cell_type": "code", - "execution_count": 25, - "metadata": {}, - "outputs": [ - { - "name": "stderr", - "output_type": "stream", - "text": [ - "\u001b[32mINFO:lightwood-42831:Analyzing a sample of 222\u001b[0m\n", - "\u001b[32mINFO:lightwood-42831:from a total population of 225, this is equivalent to 98.7% of your data.\u001b[0m\n", - "\u001b[32mINFO:lightwood-42831:Using 15 processes to deduct types.\u001b[0m\n" - ] - }, - { - "data": { - "text/plain": [ - "{'additional_info', 'dtypes', 'identifiers'}" - ] - }, - "execution_count": 25, - "metadata": {}, - "output_type": "execute_result" - } - ], - "source": [ - "from lightwood.data import infer_types\n", - "from lightwood.api.types import TypeInformation\n", - "\n", - "type_information = infer_types(df, problem_definition.pct_invalid)\n", - "\n", - "{field.name for field in fields(TypeInformation)} # show the fields this dataclass has" - ] - }, - { - "cell_type": "markdown", - "metadata": {}, - "source": [ - "We can now check the inferred types:" - ] - }, - { - "cell_type": "code", - "execution_count": 26, - "metadata": {}, - "outputs": [ - { - "data": { - "text/plain": [ - "{'Population': 'integer',\n", - " 'Area (sq. mi.)': 'integer',\n", - " 'Pop. Density ': 'float',\n", - " 'GDP ($ per capita)': 'integer',\n", - " 'Literacy (%)': 'float',\n", - " 'Infant mortality ': 'float',\n", - " 'Development Index': 'categorical'}" - ] - }, - "execution_count": 26, - "metadata": {}, - "output_type": "execute_result" - } - ], - "source": [ - "type_information.dtypes" - ] - }, - { - "cell_type": "markdown", - "metadata": {}, - "source": [ - "Looks OK!" - ] - }, - { - "cell_type": "markdown", - "metadata": {}, - "source": [ - "## Step 2: Run the statistical analysis\n", - "\n", - "We now have all the necessary ingredients to run the statistical analysis. Normally, you would ask Lightwood for a Json AI object to be generated according to the dataset and the problem definition. 
Internally, Lightwood will then run the statistical analysis for the provided dataset, and store it for later usage.\n", - "\n", - "Afterwards, you would make modifications to the Json AI as needed (for some examples, check out the other tutorials in `lightwood/examples/json_ai`), and finally generate a predictor object to learn and predict the task.\n", - "\n", - "In this case though, we will call it directly:" - ] - }, - { - "cell_type": "code", - "execution_count": 27, - "metadata": {}, - "outputs": [ - { - "name": "stderr", - "output_type": "stream", - "text": [ - "\u001b[32mINFO:lightwood-42831:Starting statistical analysis\u001b[0m\n", - "\u001b[32mINFO:lightwood-42831:Finished statistical analysis\u001b[0m\n" - ] - } - ], - "source": [ - "from lightwood.api.types import StatisticalAnalysis # the class where everything is stored\n", - "from lightwood.data import statistical_analysis # generates an instance of the class\n", - "\n", - "stan = statistical_analysis(df, \n", - " type_information.dtypes, \n", - " type_information.identifiers, \n", - " problem_definition)" - ] - }, - { - "cell_type": "markdown", - "metadata": {}, - "source": [ - "## Step 3: Peeking inside\n", - "\n", - "Now that our analysis is complete, we can check what Lightwood thinks of this dataset:" - ] - }, - { - "cell_type": "code", - "execution_count": 28, - "metadata": {}, - "outputs": [ - { - "data": { - "text/plain": [ - "{'avg_words_per_sentence',\n", - " 'bias',\n", - " 'buckets',\n", - " 'df_std_dev',\n", - " 'distinct',\n", - " 'histograms',\n", - " 'missing',\n", - " 'nr_rows',\n", - " 'positive_domain',\n", - " 'target_class_distribution',\n", - " 'train_observed_classes'}" - ] - }, - "execution_count": 28, - "metadata": {}, - "output_type": "execute_result" - } - ], - "source": [ - "{field.name for field in fields(StatisticalAnalysis)} # show the fields this dataclass has" - ] - }, - { - "cell_type": "markdown", - "metadata": {}, - "source": [ - "Some of these fields aren't really applicable nor useful for this dataset, so let's only check the ones that are. \n", - "\n", - "We can start with a very basic question: how many rows does the dataset have?" - ] - }, - { - "cell_type": "code", - "execution_count": 29, - "metadata": {}, - "outputs": [ - { - "data": { - "text/plain": [ - "225" - ] - }, - "execution_count": 29, - "metadata": {}, - "output_type": "execute_result" - } - ], - "source": [ - "stan.nr_rows" - ] - }, - { - "cell_type": "markdown", - "metadata": {}, - "source": [ - "Here are some other insights produced in the analysis:" - ] - }, - { - "cell_type": "markdown", - "metadata": {}, - "source": [ - "### Amount of missing information\n", - "\n", - "Is there missing information in the dataset?" - ] - }, - { - "cell_type": "code", - "execution_count": 30, - "metadata": { - "scrolled": false - }, - "outputs": [ - { - "data": { - "text/plain": [ - "{'Population': 0.0,\n", - " 'Area (sq. mi.)': 0.0,\n", - " 'Pop. Density ': 0.0,\n", - " 'GDP ($ per capita)': 0.0,\n", - " 'Literacy (%)': 0.0,\n", - " 'Infant mortality ': 0.0,\n", - " 'Development Index': 0.0}" - ] - }, - "execution_count": 30, - "metadata": {}, - "output_type": "execute_result" - } - ], - "source": [ - "stan.missing" - ] - }, - { - "cell_type": "markdown", - "metadata": {}, - "source": [ - "Seemingly not!" 
- ] - }, - { - "cell_type": "markdown", - "metadata": {}, - "source": [ - "### Buckets per column\n", - "\n", - "For numerical colums, values are bucketized into discrete ranges.\n", - "\n", - "Each categorical column gets a bucket per each observed class.\n", - "\n", - "Let's check an example for one of each:" - ] - }, - { - "cell_type": "code", - "execution_count": 32, - "metadata": {}, - "outputs": [ - { - "data": { - "text/plain": [ - "['3', '4', '2', '1']" - ] - }, - "execution_count": 32, - "metadata": {}, - "output_type": "execute_result" - } - ], - "source": [ - "stan.buckets['Development Index'] # categorical" - ] - }, - { - "cell_type": "code", - "execution_count": 37, - "metadata": { - "scrolled": false - }, - "outputs": [ - { - "data": { - "text/plain": [ - "[500,\n", - " 1592,\n", - " 2684,\n", - " 3776,\n", - " 4868,\n", - " 5960,\n", - " 7052,\n", - " 8144,\n", - " 9236,\n", - " 10328,\n", - " 11420,\n", - " 12512,\n", - " 13604,\n", - " 14696,\n", - " 15788,\n", - " 16880,\n", - " 17972,\n", - " 19064,\n", - " 20156,\n", - " 21248,\n", - " 22340,\n", - " 23432,\n", - " 24524,\n", - " 25616,\n", - " 26708,\n", - " 27800,\n", - " 28892,\n", - " 29984,\n", - " 31076,\n", - " 32168,\n", - " 33260,\n", - " 34352,\n", - " 35444,\n", - " 36536,\n", - " 37628,\n", - " 38720,\n", - " 39812,\n", - " 40904,\n", - " 41996,\n", - " 43088,\n", - " 44180,\n", - " 45272,\n", - " 46364,\n", - " 47456,\n", - " 48548,\n", - " 49640,\n", - " 50732,\n", - " 51824,\n", - " 52916,\n", - " 54008]" - ] - }, - "execution_count": 37, - "metadata": {}, - "output_type": "execute_result" - } - ], - "source": [ - "stan.buckets['GDP ($ per capita)'] # numerical" - ] - }, - { - "cell_type": "markdown", - "metadata": {}, - "source": [ - "### Bias per column\n", - "\n", - "We can also check whether each column has buckets of data that exhibit some degree of bias:" - ] - }, - { - "cell_type": "code", - "execution_count": 38, - "metadata": {}, - "outputs": [ - { - "name": "stdout", - "output_type": "stream", - "text": [ - "'Population' entropy: 0.212\n", - "Biased buckets: [131403695, 78845027, 52565693, 26286360, 7026]\n", - "\n", - "'Area (sq. mi.)' entropy: 0.294\n", - "\n", - "\n", - "'Pop. 
Density ' entropy: 0.143\n", - "Biased buckets: [650.86, 6183.17, 976.29, 325.43, 0.0]\n", - "\n", - "'GDP ($ per capita)' entropy: 0.76\n", - "\n", - "\n", - "'Literacy (%)' entropy: 0.753\n", - "\n", - "\n", - "'Infant mortality ' entropy: 0.767\n", - "\n", - "\n", - "'Development Index' entropy: 0.89\n", - "\n", - "\n" - ] - } - ], - "source": [ - "for colname, col in stan.bias.items():\n", - " print(f\"'{colname}' entropy: {round(col['entropy'], 3)}\")\n", - " print(f\"Biased buckets: {col['biased_buckets']}\\n\" if col['biased_buckets'] else '\\n')" - ] - }, - { - "cell_type": "markdown", - "metadata": {}, - "source": [ - "### Column histograms\n", - "\n", - "Finally, let's plot histograms for some columns:" - ] - }, - { - "cell_type": "code", - "execution_count": 39, - "metadata": { - "scrolled": false - }, - "outputs": [ - { - "data": { - "image/png": "iVBORw0KGgoAAAANSUhEUgAABBQAAAGnCAYAAAAHeQTgAAAAOXRFWHRTb2Z0d2FyZQBNYXRwbG90bGliIHZlcnNpb24zLjMuMywgaHR0cHM6Ly9tYXRwbG90bGliLm9yZy/Il7ecAAAACXBIWXMAAAsTAAALEwEAmpwYAACo20lEQVR4nOydd7gkRdX/P7WZsEl2WTJLXvICS845Z0GQnKOSc1qSZFhyzjnnJBkBQUkiQQUVFUXArO/rz1elfn+c00zd3rm3q2dm2bvL9/M889zp7ulzK4dTp06FGCNCCCGEEEIIIYQQdegzuQMghBBCCCGEEEKIKQ8pFIQQQgghhBBCCFEbKRSEEEIIIYQQQghRGykUhBBCCCGEEEIIURspFIQQQgghhBBCCFEbKRSEEEIIIYQQQghRGykUhBBCfOUIIbwTQlhtcodjUhNCOCWE8IcQwu8nYxhWCyF8NLn+/+QmhPBsCGH3Ft+dI4TwjxBC306HSwghhOgEUigIIYSYqgghfBhCWKt0b+cQwgvFdYxx4RjjsxVyRocQYgih3yQK6iQlhDAHcAiwUIxxpskdnt6A5+f/+CT9tyGEc3vTZL1cdmOMv44xTh9j/O/kDJcQQgjRHVIoCCGEEJOBL0FRMQfwxxjjp3VfnFKVKJksHmOcHlgT+Cawx2QOjxBCCDHFIoWCEEKIrxzpSnAIYZkQwqshhL+FED4JIZzrP3ve//7FV7SXDyH0CSEcG0L4VQjh0xDCDSGEoYncHf3ZH0MIx5X+z/gQwl0hhJtCCH8Ddvb//f0Qwl9CCB+HEC4KIQxI5MUQwr4hhPdDCH8PIZwcQpgnhPCSh/eO9PfJe2sBTwCzeNiv8/ub+HaPv7gp/oKlNDkihPAW8D/NlAohhIVDCE+EEP7kaXW03x8YQpgQQvidfyaEEAZ2k/YxhDBvcn1dCOEU/75aCOGjEMLhnr4fhxA2CyFsEEL4mf/fo5N3x3sa3ODp804IYVyPme/EGH8CfA9YxGXtEUL4wP/HAyGEWUph/nYI4Re+heSsEEKfJAw3Jb/t1rLF8+5pLx9/CCHcHEIY5s9uxJRAD3qeHV6WFUKYxcP2Jw/rHonsltNCCCGEaBUpFIQQQnzVOR84P8Y4BJgHuMPvr+J/h7nZ+feBnf2zOjA3MD1wEUAIYSHgEmA7YGZgKDBr6X9tCtwFDANuBv4LHASMAJbHVs33Lb2zLrAUsBxwOHAFsD0wOzYZ3rYcoRjjk8D6wO887DuHEOYHbgUOBEYCj2CT11QhsS2wocf5P6nMEMJg4EngMWAWYF7gKX98jIdvLLA4sAxwbDlcmcwEDMLS7njgSo/vUsDKwHEhhLmS328C3Ial6QN4flTh+bUy8EYIYQ3gNGBrLO9+5TJTNgfGAUti+bhr/agR/P/MAiyI5eF4gBjjDsCvgY09z85s8v5twEf+/teB73jYC1pKCyGEEKJVpFAQQggxNXKfr8L/JYTwF2yi3x3/BuYNIYyIMf4jxvhyD7/dDjg3xviLGOM/gKOAbXwF+evAgzHGF2KM/4dNhmPp/e/HGO+LMX4eY/xnjPG1GOPLMcb/xBg/BC4HVi29c2aM8W8xxneAt4Hv+v//K/AosERWisA3gIdjjE/EGP8NnA1MA6yQ/OaCGONvYoz/bPL+RsDvY4znxBj/X4zx7zHGV5J0OSnG+GmM8TPgRGCHzHCV+TdwqofxNkzZcr7/v3eAdzGlRcELMcZH3M/AjaVnzXg9hPBn4EHgKuBaD/81McbXY4z/wvJ1+RDC6OS9M2KMf4ox/hqYQBNFThUxxg88/f/l6XQuE+d3U0IIswMrAkd4+r/p4d8x+VndtBBCCCHaQgoFIYQQUyObxRiHFR8mXvVP2Q2YH/hJCOGHIYSNevjtLNjqdcGvgH7AKH/2m+JBjPF/gT+W3v9NehFCmD+E8FAI4fe+DeI72AQ65ZPk+z+bXE/fQ3i7DXuM8XMPT2pF8ZvySwmzAz/Pke3fZ+nmt1X8MXFCWCg2eopzeoLF/wKDmm03SFgyxjg8xjhPjPFYT4dy2vwDy7vu0qal+IUQRoUQbgvmEPJvwE1MnN/dMQvwpxjj30vhSMNYNy2EEEKItpBCQQghxFeaGOP7McZtgRmBM4C7QgjTMbF1AcDvgDmT6zmA/2AT3o+B2YoHIYRpgBnK/650fSnwE2A+33JxNGYWPynoEvYQQsCUBL/tIXwpv8G2eVTKxtLld9389n+BaZPr3nACRTltpsPyLk2b2ZPvafz+h/z4fAdL40U9v7ena373lP6/A77mW0/ScPy2m98LIYQQkxwpFIQQQnylCSFsH0IY6SvVf/HbnwOf+d90En0rcFAIYa4QwvTYBPF29zdwF7BxCGEF90swnmrlwGDgb8A/QghjgH06FK1m3AFsGEJYM4TQHztS8l/AS5nvPwTMHEI40J0wDg4hLOvPbgWODSGMDCGMwLZ73NSNnDeBb4YQ+oYQ1iPT5H8ScyuwSwhhrDuT/A7wim9DKTgshDDctx4cANzu998EVgkhzBHMQedRPfyfwcA/gL+GEGYFDis9/4RulDYxxt9geXVaCGFQCGExzLqmu3QWQgghJjlSKAghhPiqsx7wTgjhH5iDxm3cv8H/AqcCL7ovhuWAa7C96c8DvwT+H/AtAN/f/y1s3//H2MTxU2zS3h2HYkc
X/h1zPnh7D79tixjjT7EV8QuBPwAbYw4A/y/z/b8Da/t7vwfex5xTApwCvAq8BfwYeN3vNeMAl/EXzHfBfbUj02HcieVxwN1Y3s0DbFP62f3Aa5gC4WHgan/3CSzf3vLnD/Xwr07EnDr+1WXcU3p+GqaY+UsI4dAm728LjMasFe4FTvCwCyGEEJOFEGNP1nVCCCGEaAW3YPgLtp3hl5M5OKINQggRy8cPJndYhBBCiN6ELBSEEEKIDhFC2DiEMK3vwT8bW63/cPKGSgghhBBi0iCFghBCCNE5NsXM0X8HzIdtn5ApoBBCCCGmSrTlQQghhBBCCCGEELWRhYIQQgghhBBCCCFq029yBwBgxIgRcfTo0ZM7GEIIIYQQQgghhCjx2muv/SHGOLJ8v1coFEaPHs2rr746uYMhhBBCCCGEEEKIEiGEXzW7ry0PQgghhBBCCCGEqI0UCkIIIYQQQgghhKiNFApCCCGEEEIIIYSojRQKQgghhBBCCCGEqI0UCkIIIYQQQgghhKiNFApCCCGEEEIIIYSojRQKQgghhBBCCCGEqI0UCkIIIYQQQgghhKiNFApCCCGEEEIIIYSojRQKQgghhBBCCCGEqI0UCkIIIYQQQgghhKiNFApCCCGEEEIIIYSojRQKQgghhBBCCCGEqI0UCkIIIYQQQgghhKhNv8kdgCmZ0Uc+3PK7H56+YQdDIoQQQgghhBBCfLnIQkEIIYQQQgghhBC1kUJBCCGEEEIIIYQQtZFCQQghhBBCCCGEELWRQkEIIYQQQgghhBC1kUJBCCGEEEIIIYQQtZFCQQghhBBCCCGEELWpVCiEEK4JIXwaQng7uXd7COFN/3wYQnjT748OIfwzeXbZJAy7EEIIIYQQQgghJhP9Mn5zHXARcENxI8b4jeJ7COEc4K/J738eYxzbofAJIYQQQgghhBCiF1KpUIgxPh9CGN3sWQghAFsDa3Q4XEIIIYQQQgghhOjFtOtDYWXgkxjj+8m9uUIIb4QQngshrNzdiyGEPUMIr4YQXv3ss8/aDIYQQgghhBBCCCG+TNpVKGwL3JpcfwzMEWNcAjgYuCWEMKTZizHGK2KM42KM40aOHNlmMIQQQgghhBBCCPFl0rJCIYTQD9gCuL24F2P8V4zxj/79NeDnwPztBlIIIYQQQgghhBC9i3YsFNYCfhJj/Ki4EUIYGULo69/nBuYDftFeEIUQQgghhBBCCNHbyDk28lbg+8ACIYSPQgi7+aNt6LrdAWAV4C0/RvIuYO8Y4586GF4hhBBCCCGEEEL0AnJOedi2m/s7N7l3N3B3+8ESQgghhBBCCCFEb6Zdp4xCCCGEEEIIIYT4CiKFghBCCCGEEEIIIWojhYIQQgghhBBCCCFqI4WCEEIIIYQQQgghaiOFghBCCCGEEEIIIWojhYIQQgghhBBCCCFqI4WCEEIIIYQQQgghaiOFghBCCCGEEEIIIWojhYIQQgghhBBCCCFqI4WCEEIIIYQQQgghaiOFghBCCCGEEEIIIWojhYIQQgghhBBCCCFqI4WCEEIIIYQQQgghaiOFghBCCCGEEEIIIWojhYIQQgghhBBCCCFqI4WCEEIIIYQQQgghaiOFghBCCCGEEEIIIWojhYIQQgghhBBCCCFqI4WCEEIIIYQQQgghaiOFghBCCCGEEEIIIWojhYIQQgghhBBCCCFqI4WCEEIIIYQQQgghaiOFghBCCCGEEEIIIWojhYIQQgghhBBCCCFqI4WCEEIIIYQQQgghaiOFghBCCCGEEEIIIWojhYIQQgghhBBCCCFqI4WCEEIIIYQQQgghaiOFghBCCCGEEEIIIWojhYIQQgghhBBCCCFqI4WCEEIIIYQQQgghaiOFghBCCCGEEEIIIWojhYIQQgghhBBCCCFqI4WCEEIIIYQQQgghalOpUAghXBNC+DSE8HZyb3wI4bchhDf9s0Hy7KgQwgchhJ+GENadVAEXQgghhBBCCCHE5CPHQuE6YL0m98+LMY71zyMAIYSFgG2Ahf2dS0IIfTsVWCGEEEIIIYQQQvQOKhUKMcbngT9lytsUuC3G+K8Y4y+BD4Bl2gifEEIIIYQQQggheiHt+FDYP4Twlm+JGO73ZgV+k/zmI783ESGEPUMIr4YQXv3ss8/aCIYQQgghhBBCCCG+bFpVKFwKzAOMBT4GzqkrIMZ4RYxxXIxx3MiRI1sMhhBCCCGEEEIIISYHLSkUYoyfxBj/G2P8HLiSxraG3wKzJz+dze8JIYQQQgghhBBiKqIlhUIIYebkcnOgOAHiAWCbEMLAEMJcwHzAD9oLohBCCCGEEEIIIXob/ap+EEK4FVgNGBFC+Ag4AVgthDAWiMCHwF4AMcZ3Qgh3AO8C/wH2izH+d5KEXAghhBBCCCGEEJONSoVCjHHbJrev7uH3pwKnthMoIYQQQgghhBBC9G7aOeVBCCGEEEIIIYQQX1GkUBBCCCGEEEIIIURtpFAQQgghhBBCCCFEbaRQEEIIIYQQQgghRG2kUBBCCCGEEEIIIURtpFAQQgghhBBCCCFEbaRQEEIIIYQQQgghRG2kUBBCCCGEEEIIIURtpFAQQgghhBBCCCFEbaRQEEIIIYQQQgghRG2kUBBCCCGEEEIIIURtpFAQQgghhBBCCCFEbaRQEEIIIYQQQgghRG2kUBBCCCGEEEIIIURtpFAQQgghhBBCCCFEbaRQEEIIIYQQQgghRG2kUBBCCCGEEEIIIURtpFAQQgghhBBCCCFEbaRQEEIIIYQQQgghRG2kUBBCCCGEEEIIIURtpFAQQgghhBBCCCFEbaRQEEIIIYQQQgghRG2kUBBCCCGEEEIIIURtpFAQQgghhBBCCCFEbaRQEEIIIYQQQgghRG2kUBBCCCGEEEIIIURtpFAQQgghhBBCCCFEbaRQEEIIIYQQQgghRG2kUBBCCCGEEEIIIURtpFAQQgghhBBCCCFEbaRQEEIIIYQQQgghRG2kUBBCCCGEEEIIIURtpFAQQgghhBBCCCFEbaRQEEIIIYQQQgghRG0qFQohhGtCCJ+GEN5O7p0VQvhJCOGtEMK9IYRhfn90COGfIYQ3/XPZJAy7EEIIIYQQQgghJhM5FgrXAeuV7j0BLBJjXAz4GXBU8uznMcax/tm7M8EUQgghhBBCCCFEb6JSoRBjfB74U+ned2OM//HLl4HZJkHYhBBCCCGEEEII0UvphA+FXYFHk+u5QghvhBCeCyGs3N1LIYQ9QwivhhBe/eyzzzoQDCGEEEIIIYQQQnxZtKVQCCEcA/wHuNlvfQzMEWNcAjgYuCWEMKTZuzHGK2KM42KM40aOHNlOMIQQQgghhBBCCPEl07JCIYSwM7ARsF2MMQLEGP8VY/yjf38N+DkwfwfCKYQQQgghhBBCiF5ESwqFEMJ6wOHAJjHG/03ujwwh9PXvcwPzAb/oRECFEEIIIYQQQgjRe+hX9YMQwq3AasCIEMJHwAnYqQ4DgSdCCAAv+4kOqwAnhRD+DXwO7B1j/FNTwUIIIYQQQgghhJhiqVQoxBi3bXL76m5+ezdwd7uBEkIIIYQQQg
ghRO+mE6c8CCGEEEIIIYQQ4iuGFApCCCGEEEIIIYSojRQKQgghhBBCCCGEqI0UCkIIIYQQQgghhKiNFApCCCGEEEIIIYSojRQKQgghhBBCCCGEqI0UCkIIIYQQQgghhKiNFApCCCGEEEIIIYSojRQKQgghhBBCCCGEqI0UCkIIIYQQQgghhKiNFApCCCGEEEIIIYSojRQKQgghhBBCCCGEqI0UCkIIIYQQQgghhKiNFApCCCGEEEIIIYSojRQKQgghhBBCCCGEqI0UCkIIIYQQQgghhKiNFApCCCGEEEIIIYSojRQKQgghhBBCCCGEqI0UCkIIIYQQQgghhKiNFApCCCGEEEIIIYSojRQKQgghhBBCCCGEqI0UCkIIIYQQQgghhKiNFApCCCGEEEIIIYSojRQKQgghhBBCCCGEqI0UCkIIIYQQQgghhKiNFApCCCGEEEIIIYSojRQKQgghhBBCCCGEqI0UCkIIIYQQQgghhKiNFApCCCGEEEIIIYSojRQKQgghhBBCCCGEqI0UCkIIIYQQQgghhKiNFApCCCGEEEIIIYSojRQKQgghhBBCCCGEqI0UCkIIIYQQQgghhKhNlkIhhHBNCOHTEMLbyb2vhRCeCCG873+H+/0QQrgghPBBCOGtEMKSkyrwQgghhBBCCCGEmDzkWihcB6xXunck8FSMcT7gKb8GWB+Yzz97Ape2H0whhBBCCCGEEEL0JrIUCjHG54E/lW5vClzv368HNkvu3xCNl4FhIYSZOxBWIYQQQgghhBBC9BLa8aEwKsb4sX//PTDKv88K/Cb53Ud+rwshhD1DCK+GEF797LPP2giGEEIIIYQQQgghvmw64pQxxhiBWPOdK2KM42KM40aOHNmJYAghhBBCCCGEEOJLoh2FwifFVgb/+6nf/y0we/K72fyeEEIIIYQQQgghphLaUSg8AOzk33cC7k/u7+inPSwH/DXZGiGEEEIIIYQQQoipgH45Pwoh3AqsBowIIXwEnACcDtwRQtgN+BWwtf/8EWAD4APgf4FdOhxmIYQQQgghhBBCTGayFAoxxm27ebRmk99GYL92AiWEEEIIIYQQQojeTUecMgohhBBCCCGEEOKrhRQKQgghhBBCCCGEqI0UCkIIIYQQQgghhKiNFApCCCGEEEIIIYSojRQKQgghhBBCCCGEqI0UCkIIIYQQQgghhKiNFApCCCGEEEIIIYSojRQKQgghhBBCCCGEqI0UCkIIIYQQQgghhKiNFApCCCGEEEIIIYSojRQKQgghhBBCCCGEqI0UCkIIIYQQQgghhKiNFApCCCGEEEIIIYSojRQKQgghhBBCCCGEqI0UCkIIIYQQQgghhKiNFApCCCGEEEIIIYSojRQKQgghhBBCCCGEqI0UCkIIIYQQQgghhKiNFApCCCGEEEIIIYSojRQKQgghhBBCCCGEqI0UCkIIIYQQQgghhKiNFApCCCGEEEIIIYSojRQKQgghhBBCCCGEqI0UCkIIIYQQQgghhKiNFApCCCGEEEIIIYSojRQKQgghhBBCCCGEqI0UCkIIIYQQQgghhKiNFApCCCGEEEIIIYSojRQKQgghhBBCCCGEqI0UCkIIIYQQQgghhKiNFApCCCGEEEIIIYSojRQKQgghhBBCCCGEqI0UCkIIIYQQQgghhKiNFApCCCGEEEIIIYSoTb9WXwwhLADcntyaGzgeGAbsAXzm94+OMT7S6v8RQgghhBBCCCFE76NlhUKM8afAWIAQQl/gt8C9wC7AeTHGszsRQCGEEEIIIYQQQvQ+OrXlYU3g5zHGX3VInhBCCCGEEEIIIXoxnVIobAPcmlzvH0J4K4RwTQhheLMXQgh7hhBeDSG8+tlnnzX7iRBCCCGEEEIIIXopbSsUQggDgE2AO/3WpcA82HaIj4Fzmr0XY7wixjguxjhu5MiR7QZDCCGEEEIIIYQQXyKdsFBYH3g9xvgJQIzxkxjjf2OMnwNXAst04H8IIYQQQgghhBCiF9EJhcK2JNsdQggzJ882B97uwP8QQgghhBBCCCFEL6LlUx4AQgjTAWsDeyW3zwwhjAUi8GHpmRBCCCGEEEIIIaYC2lIoxBj/B5ihdG+HtkIkhBBCCCGEEEKIXk+nTnkQQgghhBBCCCHEVwgpFIQQQgghhBBCCFEbKRSEEEIIIYQQQghRGykUhBBCCCGEEEIIURspFIQQQgghhBBCCFEbKRSEEEIIIYQQQghRGykUhBBCCCGEEEIIURspFIQQQgghhBBCCFEbKRSEEEIIIYQQQghRGykUhBBCCCGEEEIIURspFIQQQgghhBBCCFEbKRSEEEIIIYQQQghRGykUhBBCCCGEEEIIURspFIQQQgghhBBCCFEbKRSEEEIIIYQQQghRGykUhBBCCCGEEEIIURspFIQQQgghhBBCCFEbKRSEEEIIIYQQQghRGykUhBBCCCGEEEIIURspFIQQQgghhBBCCFEbKRSEEEIIIYQQQghRGykUhBBCCCGEEEIIURspFIQQQgghhBBCCFEbKRSEEEIIIYQQQghRGykUhBBCCCGEEEIIURspFIQQQgghhBBCCFEbKRSEEEIIIYQQQghRGykUhBBCCCGEEEIIURspFIQQQgghhBBCCFEbKRSEEEIIIYQQQghRGykUhBBCCCGEEEIIURspFIQQQgghhBBCCFEbKRSEEEIIIYQQQghRGykUhBBCCCGEEEIIUZt+7QoIIXwI/B34L/CfGOO4EMLXgNuB0cCHwNYxxj+3+7+EEEIIIYQQQgjRO+iUhcLqMcaxMcZxfn0k8FSMcT7gKb8WQgghhBBCCCHEVMKk2vKwKXC9f78e2GwS/R8hhBBCCCGEEEJMBjqhUIjAd0MIr4UQ9vR7o2KMH/v33wOjyi+FEPYMIbwaQnj1s88+60AwhBBCCCGEEEII8WXRtg8FYKUY429DCDMCT4QQfpI+jDHGEEIsvxRjvAK4AmDcuHETPRdCCCGEEEIIIUTvpW0LhRjjb/3vp8C9wDLAJyGEmQH876ft/h8hhBBCCCGEEEL0HtpSKIQQpgshDC6+A+sAbwMPADv5z3YC7m/n/wghhBBCCCGEEKJ30e6Wh1HAvSGEQtYtMcbHQgg/BO4IIewG/ArYus3/I4QQQgghhBBCiF5EWwqFGOMvgMWb3P8jsGY7soUQQgghhBBCCNF7mVTHRgohhBBCCCGEEGIqRgoFIYQQQgghhBBC1EYKBSGEEEIIIYQQQtRGCgUhhBBCCCGEEELURgoFIYQQQgghhBBC1EYKBSGEEEIIIYQQQtRGCgUhhBBCCCGEEELURgoFIYQQQgghhBBC1EYKBSGEEEIIIYQQQtRGCgUhhBBCCCGEEELURgoFIYQQQgghhBBC1EYKBSGEEEIIIYQQQtRGCgUhhBBCCCGEEELURgoFIYQQQgghhBBC1EYKBSGEEEIIIYQQQtRGCgUhhBBCCCGEEELURgoFIYQQQgghhBBC1EYKBSGEEEIIIYQQQtRGCgUhhBBCCCGEEELURgoFIYQQQgghhBBC1EYKBSGEEEIIIYQQQtRGCgUhhBBCC
CGEEELURgoFIYQQQgghhBBC1EYKBSGEEEIIIYQQQtRGCgUhhBBCCCGEEELURgoFIYQQQgghhBBC1EYKBSGEEEIIIYQQQtRGCgUhhBBCCCGEEELURgoFIYQQQgghhBBC1EYKBSGEEEIIIYQQQtRGCgUhhBBCCCGEEELURgoFIYQQQgghhBBC1EYKBSGEEEIIIYQQQtSmZYVCCGH2EMIzIYR3QwjvhBAO8PvjQwi/DSG86Z8NOhdcIYQQQgghhBBC9Ab6tfHuf4BDYoyvhxAGA6+FEJ7wZ+fFGM9uP3hCCCGEEEIIIYTojbSsUIgxfgx87N//HkJ4D5i1UwETQgghhBBCCCFE76UjPhRCCKOBJYBX/Nb+IYS3QgjXhBCGd+J/CCGEEEIIIYQQovfQtkIhhDA9cDdwYIzxb8ClwDzAWMyC4Zxu3tszhPBqCOHVzz77rN1gCCGEEEIIIYQQ4kukLYVCCKE/pky4OcZ4D0CM8ZMY439jjJ8DVwLLNHs3xnhFjHFcjHHcyJEj2wmGEEIIIYQQQgghvmTaOeUhAFcD78UYz03uz5z8bHPg7daDJ4QQQgghhBBCiN5IO6c8rAjsAPw4hPCm3zsa2DaEMBaIwIfAXm38DyGEEEIIIYQQQvRC2jnl4QUgNHn0SOvBEUIIIYQQQgghxJRAR055EEIIIYQQQgghxFcLKRSEEEIIIYQQQghRm3Z8KIhOMX5oG+/+tXPhEEIIIYQQQgghMpGFghBCCCGEEEIIIWojhYIQQgghhBBCCCFqI4WCEEIIIYQQQgghaiOFghBCCCGEEEIIIWojhYIQQgghhBBCCCFqI4WCEEIIIYQQQgghaiOFghBCCCGEEEIIIWojhYIQQgghhBBCCCFqI4WCEEIIIYQQQgghaiOFghBCCCGEEEIIIWojhYIQQgghhBBCCCFqI4WCEEIIIYQQQgghaiOFghBCCCGEEEIIIWojhYIQQgghhBBCCCFqI4WCEEIIIYQQQgghaiOFghBCCCGEEEIIIWojhYIQQgghhBBCCCFqI4WCEEIIIYQQQgghaiOFghBCCCGEEEIIIWojhYIQQgghhBBCCCFqI4WCEEIIIYQQQgghatNvcgdAdJZFr1+0pfd+vNOPOxwSIYQQQgghhBBTM7JQEEIIIYQQQgghRG2kUBBCCCGEEEIIIURtpFAQQgghhBBCCCFEbaRQEEIIIYQQQgghRG2kUBBCCCGEEEIIIURtpFAQQgghhBBCCCFEbXRspGjKe2MWbPndBX/yXgdDIoQQQgghhBCiNyKFgphiOOcbG7X03iG3P9ThkAghhBBCCCGE0JYHIYQQQgghhBBC1GaSWSiEENYDzgf6AlfFGE+fVP9L9F4u3vvplt/d77I1OhgSIYQQQgghhBCdZJJYKIQQ+gIXA+sDCwHbhhAWmhT/SwghhBBCCCGEEF8+k8pCYRnggxjjLwBCCLcBmwLvTqL/J0Q2Hx35vZbfne30lb/4Pn78+JbltPNuT8z0zJstv/v71cd2LBxdGD+0xff+2tlwCCGEEEJoXCI6QK8cc08mJpUPhVmB3yTXH/k9IYQQQgghhBBCTAWEGGPnhYbwdWC9GOPufr0DsGyMcf/kN3sCe/rlAsBPOx6QycsI4A+9TFZvk9NJWb1NTidl9TY5nZTV2+R0UlZvk9NJWb1NTidl9TY5nZTV2+R0UlZvk9NJWb1NTidl9TY5nZTV2+R0UlZvk9NJWb1NTidl9TY5nZTV2+R0WlZvYc4Y48jyzUm15eG3wOzJ9Wx+7wtijFcAV0yi/z/ZCSG8GmMc15tk9TY5vTFMituUGSbFbcoMk+I2ZYZJcZsyw6S4TZlhUtymzDApblNmmHpj3KYEJtWWhx8C84UQ5gohDAC2AR6YRP9LCCGEEEIIIYQQXzKTxEIhxvifEML+wOPYsZHXxBjfmRT/SwghhBBCCCGEEF8+k2rLAzHGR4BHJpX8KYBObufolKzeJqeTsnqbnE7K6m1yOimrt8nppKzeJqeTsnqbnE7K6m1yOimrt8nppKzeJqeTsnqbnE7K6m1yOimrt8nppKzeJqeTsnqbnE7K6m1yOimrt8nptKxezSRxyiiEEEIIIYQQQoipm0nlQ0EIIYQQQgghhBBTMVIoCCGEEEIIIYQQojZSKEzhhBCC/+1YXoYQ+nZKlhBCdIKirWvn3XZkCFHwVSlPU3v8OkEnxl5KZyHElI4UCpOIEMLCIYQVQwiD/HpSdRiDAWKMn7fzf0IIQ0IIy7is/7YqK4SwZQhhbDthmVLoLfHzo1k7LbNjcWtnwBVCmC6E8LV25fRmOqnAazOtJ4mT3jYVAQNDCHOEEAbH9hz+9AdoU0Yaro7Uj97ShnSaSVWWOkUH2pKOlqdO0m6ZCiGM8rHLiCJ+HZDZTrvU8Xa/A/EZFkJY0tOo5bFXMT6MMcaptS3oNFNzOnV4YbBXjZd6W3g6wdRcFlthqsvgXsStwBwxxv8HnR94hBDmDiHcApwWQng6hLBd8X/qFvIQwnzAPcCJIYRfhBBGpmHOlRdCmAe4FNgmfb9dOlFpQwj9OxGWFE/rjtShNuXcGELYqQNhGB1C2DyEMG+7eRdCmCWEsG4IYZY2lV03A4+HEIYXcloMTycVJOuGEGZoV34IYZ8QwrnAN9sMzwwhhOVDCDO1k0bAKSGE9UqyW1VQtl2WQgjLAXcDxwOHNXleJ2y3hxCuCiFM10pYSv93YNI2tlv/tw0hzFmS36oi99QQwurtBCaEME0IYd5CidcGnSxLi4YQ9vIJ3OyttuUhhPlDCDuEEOZqVwFPh8pTBxVTO4cQ1gkhTNeOEiCEsBJwH7AbML6434pMz6tNQghztNkuHRRCWLEku3a98/BsGEKYu53+zcNyD7APcF5whXCL6X5XCOHhEMLQ3qKcCiFsFEJYKLS5UNGpNqlMJ8ZdIYT1QwjTdkDO9Olf/95KvSvK0OfttgnJXKCdOtcxfF7wRdzaSXMf64wMbSqsgykER4QQRrQjB+u/Z07j1EY/15GyNDmRQmESEELYGvh9jPHWEELfEMK+IYSTQggTQggLd+jfnAN8AFwAXASMDyF8P4SwaAsd0ynAd2OM6wP3AuuFEC4JIawKtRQDJwF3AEuGEG4vT7rqEkKYJv3/bVTUscB2Te63MihZIIRwRAjhghBCv1Yb7RDCrCGENUIIB4cQQhtyVgXmwRRYhBBm8wnBmJpy1sYGkusDj3r8lkue1xlIroyVgy2BO0IIhRVNXQXVCsBMwBvACYWcuvkWQlgEa/hH13mvG1mrAuNjjH9M77cQtzWBPYBfAweHEFb3vJu/ZnhWxOrsPsB9IYRN6oSjFJ61gBf8un8IoX+LE4lOlaXTsIH7jcBcIYQNQgg7hRDWhfx2ycvRgkDA4tgyway4Li3Suc3VyVWAb8UYf5Xeb7EsHYvF78YQwuIhhMEhhCE1w7McVpZOAC4JIYyrE45SeDpVltbF2rbFgXOBQ4AtQggz+/OstsDr7Q3ASsALxfutKIY6VZ461S4FU0hdCWwG7O3hAxjmz+vk36nYosDRwPRe364M
IWwLtercKjT6gFdCCHu1EBZCCGsB2wJvpfeTepeb/6t7eFbG2qR7QwjLJs/rhOtk4HpM4fIfYJsQwokhhFoLKSGE5YFZgY+BXVoMSzEmWS64tUM7hBDmBR4AdgQ2DiHM7fenrRO2TrVJibxVQgjHhxCeCiEs2ebiwi7YmPefSTmqbSUYQtgAuDCEcDNweDBl5Yga4+VCzprYwuBLobXxeypreyy9bw7tK4XbJoSwJfBkCOGEYIrF2MZYd3Nsgek2YL2Kn/ckZ1NsTHEjsHvweUYLcnYFDowxfpyUo2la7Oc6UpYmOzFGfTr8wQYtZ/n344C7sMnDMcAlQN825c8AfBdYrHT/cGwQt2wNWYsDTyfXHwFnAAcDnwDHZMrZBHjBvw/Fzl7dNnkeWojn+cB+wHxtptczwNbJ9XSthgt42dP5KU+nGf1+v5pyngImAI8BP2w1jtjK7UH+fVvgfi8D5wDr1JBzD/D1pHyd7GE8qIUwPQts798vA76FNd5715TzHLA5MAR4GDiixTT6BfC618W1gBFt5P+zSTqNA3bwsr5NTTm3Alv492OAR4AXsUnTejXkPI8pywZilg7nt5hG5wM7+/fNvJ36BXAUMLCmrLbLErAVcH9y/TvgGi/vDwOr1ZD1JGY1tZHHqVZelWT9GLOaOBdTeCzThqwXkjKwMrC/h7VuPbkX2Ny/nwjcAryLDZqz2xXge9hEYn7gSOCUXlCWrsfbbmA4pji7Hji6ppxn8f4IOM/T6Ung2y3EryPlqcPt0gSsjT0JOBPrv2+qk97AnMB9yfWHmIJhd6xt2qGGrOeLdMHGQ1e3mEZXAd/07xt4nXsfOKCmnIeTOjK31+NngP1qytkIuDu5/j3W1+4MvEIy5smQ9QzwdWARLwf7tJhGv/aydCi2uNDu+PIubBxRKE229Dj2ryGjI21SIu9HWP92kpfL7LFNE1mvAKv69zWAb3vbtBXQp4acnwPLYW33xcD3sX5hcX+eVYeBN5OyfYmXh5mAaerI8d8+jY3Fz07rawvtyXrY2OZrbZalzbycnwY8hFk+jQE28+fZZRX4CbAi1vbe5HXn68DKNdP7bWAFYFVMeXZqi2n0LLC6f98UU8JeDhxQtw52qixN7s9kD8DU+AFm8cJ2KnAtMK3fH+UNbcuD0OR/7Amc1+T+gZjWLKsQAnMAY/z7okXl8utFvIGrnCxjndlG/r0/8A3MgqJWh12K3z+xDug73tiPbEHOXsD3kutjvTF6BVixpqytMEuO4vo1TLHzPPUGW3sBDyXXJwM7tphOq2IT7/k8LAthqx6HALcD01e8H4C+WAd0GjAseTYO+AE1Bu/AasCtyfUn3tBu7OXh0Ew5KwLXJdfLYIOuE7DJc27nsSA2KF3O0/kGbLC9vD9fHpg/U9bqwM+wQciMXsfPxCaCL5AorSrk9PGw3IwNtv/m4ZvF69GdwODMvE8n3aOwgX/RwS4FzJkZpl0xzf9wTMG1ArAsNgi4nrw2oGNlyfN7Yf++LXCRfx+IKWCOzJSzOXBXcr0VNlgu8r/OgGYU1skvARQrcFcARwADPC83qhG/D7DJ2misDTnU4/oe3oZnpPdA4EJsoL0Y8CdgFWzQdomXpUEZstYHHk2u5/IwLejXCwEzT4aydDzWjs2SPFsIGzhfSF4ftzRwfXL9CabgXxUbXJ5Xowx0pDx5HK6hA+2S/34RLwOzYpPenwGvAhsCM2TKmBYbYD/raf5s8mxTzBoyJ++WLdLI83GwyyyUZyuROan0NDnby/mbHp91sL63Unnq/39abBy2Aa5gwertbljfuWmNdJ6zCLvLOyl5ti3W7uWUyXWAW5Lrtby+FYr43HK0FDaeWQlTKD2MKQVnSZ6PzpQVkvqyD9YfHYQpLF4EFsDHshXpPcDLSlttUiLzWOCm5Hp74Jzkuo6iox/WbgzxsL7m9e8wbJwwtka635tcz4AtEpxADUUsprC5xb+PxNqmG7wsnJYrJ5F1o3/fBmsD9qojI5H1d3//Qqz9nqYVOS7rEmBvry+XYRY5tRY+vL48mFx/6rIuwiyqZs2Us18p32bD5mRD/XoRYKbMenI6sKh/fwNbOPuG18M6i0JL0VWR21JZ6g2fyR6AqfWDTWRu8sb4ML83rRe8WdqQOwwbaM2EDWQ+wDXB/nx3kslqTdnTkkxkMI3wd2vK6Jt8XwMzfV6thbDsiGkfF/LG/nxscLmqx/+MqkbOO4yLsUHSytjA5FZgSUwr/RowqkaYVsdMS1f1hvZhv78ZpjWvjKeH6WxgzeTehnRVehxJ5qDb/x6fxHOG5PlzwLyZcZvX47Yp1rH19/tzYh1c1gAAm2jP6d9XBA5Oni3r4awcAAAzA0P8+wD/u7yHZfkaeTY9ME9yvSrWwV3qZewzYP1MWaOxyeOp2GruNcmzHYDv1AjXbNig9krgwTS/gZfImExgk76NgGmK97GVlsI66hVcuZAhawC2CrUfcGnp2f3UWKnAlFtXeb1ouSyV6kxqVXQkcEnmu2OA2fx7P0zZeZLXlRnrhMNl9MfbOGB2rI061cvU/5GvUBiFrfydhJlzpwPmFbGBUpbVk9eLa7GBzJOlZ08X8a+QsQg2CB2U1LeLaPRdTwNL10ijCZiird2yNA1mUbAfMLYoO9gE8x7yFG8z0LAkWwTYNXk2G6bkyG3fxuKDV49ny+WJpG2mjXYpSY/LMQXHstiK9eHYoHvuGnL6Yn3jqsCZyf3DgBsyZfTDFMtfKH6xFfyr/PuPyVxh9jp2Oda+npHcH4ytek+XKWdn//3+WN/9hN/fHDg+U8YaNCYeRbwGJc8PJ9MSA+tvR/j3or7tiG3LmCdHhr8zChiXXG8OPApch41XfkLN8Rc2xrwbU1IsiY1tLsHa7qyxK6You8k/T5WeZbVJ/ts+2Jh2+eR6NPC8X0+LWVRUtgPJ+2djipdT6Do2Pwlry3MUQsOAJ1zW0pji5QaX8wywRGZYvkGjPTkB79cwK7FXgFUy49UfU7SPSu6thVk+rl/8v0xZ4zxtRmJKimf8b6Hg35qMMVjx/zCl0nf8+25enu7A+qzcdndmrI190PPuMr8/g5f13IWqBYB1sXau6EsexCwy+njZzC3j+2NjtZNx61lsrLIHNs7N7b+He1k6y9O+dlnqLZ/JHoCp6YMPVoDl/HoGbKX4LWz1+EZqah1L8odiE+LRyb2tsBWtx71AvkfGJAIbqO2F7d07AnMgWTwL2MDyTWCFCjl9ME32nMm9fi6jj8f/FTIb/JLs6ZPvq3vFPQnT3r5Q8W7R4S+EddRXY2bTw5PfXAesWyM8Q7BJ19XYiktqUnYamdYY2ERwvuS6D6Zomh1bpXwwU84w/zsQW8l5DVsZ3AXT6j+aKWdm/7sxtop4DrA2NlE5hkylEjbYHlYuZ8n3Q3ENeoasQaXrPl6mDgb+TKKQqSPH7w3ABoe/IFltrJCT1rk1MDPONA/rDCZnLV0f62V7LUwhkJv/Ew18aKwCHUgySa3IsyL/N8cUgO9iK1RLeryyypHLGOp/V/OydF6dsoQNkA/E2o0xTZ7Pj7V
LlZMkj1vTdgebLD9LvsKtDzB7k/v9sMnN48CdNdKpUEqMxvqMVOl1WE49wZQZxQrJrF6uT8UGbQu7nKyy1I381bEB8z7AbZnvFErA9bwsvdNKWfLf7uzf1/T8OhsbrG2FKVGfyZQ1Tek6nQQeSmJRlVMO2i1PmCJpEf/eP7lfq13yMjlbcj0U6x9fpbHSPTZDzijMTPcIGsrgQZiy8wMvUz/NrHO74+bkpfsjsX7zWODamuVwG8zk/WeYJc1MXrYfyHx/pP/dDrN23IOGVeZxZLTb2FaCP9KNhRWmIHgrJ4389xNtRcH6t+8AvwSWqplG5XK0A/BXktXYjHxbN7mew+vYCzS2nKyWWSaLCXJhrXASbbRJXq6LPqoY1z2LKV5PwyeXGXLSsd/B2Mr0MzSsOc4msTjJkDfGy8+rWP2f2+9fA+yWKaPoB/pSajswZeCemXJShXsx/g7YpPcx6lk7TUPXsc0Yr7sPYX3zR8BCFTL6leTd4mXsXWzRag4yV/HxRTIvP3t7GTqviDNmNZilFEzTPLk+BFPmnA5cnPF+Grd9vQy9neT/OcDpmWGZK0njkzEr3AuS+9llqTd8JnsAppYPtsr8CraS/hvgkNLztck0Ge3hf1xEQzM3GPgajQHAmthgJHcV6UJMe7y9y30Xm8wUK53bAidkyDkLU3L8L7aSMJFWDt8vlRmumTGtbaE97FN6vhfwFyrM+Jq8txDJig8wHTYxmTMjTIWmdVqssR6ImT1/x+Uuj6269Liy4HHbOg1bIntfl/dKRtxGe0NzOjaR2cDvLwjshGtsq8obpti4FessLvB7M2Kd7aXYKsUdJMqmHmSNwfxLzE5poot1bPNik4u5MuJ2rcftCXwvfuk321GxGoithl+OrZQe3OR58HKUMwEYgw2sZk3upZY4c5ExmMQGpXdhE/6fAvv6/a9hWyce9DSfaOLaU9zS9PZy/TpmOtlj2fZ4fT/9f9jgbQ+sfbgS05T3GB5/b25s0HCpl8l1vK7s7/HNKkvYIO98bBXiakx7P5iG8uwUMpSySXmcrVwe/fkQbCBSuVqOWVw8jCmEf4INjtP8H+hlqdKMO8m782lsTetbev5uRllaABvIzlO6v5CH8x5sr39VGZgFG0wdi1l+rJI8mwGbyH2YkW9zYgPg8/D97cAIzLLgQswap05Z+hjrn4rB/kgvl0d5GbmAilVOl3Orl7+n8Em2P+uD1cf3qtK6lG/nUerb/fngnPLk//Mzuq62B7r2CX+hol1qUib3wpQAh5KYz+Z86FrnrsXao4BZd22CKRoq/TJ5en+CtQF90rj553lse1dumTwGG+zPj7Vrp2Dt5/nYymuVnKJ/uxY3saZrWzkaG6vl9AE3Y+3RS9gkdloak9t+nu6V/qZKZfJpfLJe+s1hJP40upEz2sv02sm9/qXf/JkMa4ck377w7+Xxux54rkY5KsrkzVhfv5vfX8Tv3U1Gm5TIWwSzdJjIotHL5fPYNrpKc3xMOfovGv5YZsEUsndj/cSlWHvao8WLp9UOXhaX9DhPk5SFVbCxQJWcIm5Nw+7PX6Vii4n/dmWsDVjBr8sT5vOxsVRt3xp0bZeWwMb5F1a8szSmaFk6uTcPZmGQtaDUJN82Te4NwOYdZ2KWyj/OSO/Bpes0XrNgCoHXqspSErflknvjsDH3+1hf92JVeJK4fY77TMLa72lobMvKKku96TPZAzC1fLC9ooVp0Vhs8JTt4CVD/jBsoF5o1i/D9qLdjXVQlQO1RNaswFvJ9Vhs8HYPDdOkAbgpXg9yFsdW1qelYSb3jeR5K44YHyFx3lZOQ2yieVWGnAOxAd6x2GrXqNLzy0hMOitkjcc610uxjixgg65LvZG8j4w93T3FDTMv/Jw8Jc7d3qjNhJmn/h+mPZ6nmewe5NyOrR4sj00el8fM1NP9yll757DJZGE+OAdmMns0DTO5nYD9a8btW8D/w31DJL/J2Q5yp5eVXTHz+/k9TIXmdwbcoU6GrNuK/MU6nyWw7UCFRnov4PBMOUUabY/5CHkYm/wGrDPJGUB0F7ciPIeTobEv5dloTCF5AI0tAqFG/t/rZWkBbKD2C8+3OYryWCUL277xYnL9stevy70s9dgeZZbHtBzl7i2/E5vc9MFWSF/AViVSU+NcS4c07672vF85KZdfJ8Mpo+f7If59YWz15wFscDMtNgEfniHncaydPAJrM+/36yLfLgPOrhmvazDl5lI0JqeV+V/Ku8L8+Ickq2s0FN4526bSMrkFNuD7Pu7MGGvLs3zXUNGe5JYnbGJ1kdeN28vveL5VtkulMrkeNoh9mK59QE472V2du5JMh8yluJ3lf59jYkusb5LnGyQtk4d4vMZ7uZ7Jy9TQDDnl/m1ZzFpxIX8+K02sKZrI2ZiGw+nFMAXFSnXSJqNMLpr8pke/R/6bnTDfBK95fhV+HQrrgIVJtsLWzLfZymHJrG/N2sknPa5F3lW2SaW0+hCzcJrf7wXMvH8kZnGau4L/ENaunIUr/TCrgK9hfd+qZGx/xZS9x2Pj+9uxVeXVsfFyH2w8tkcbceuLKWKvxE4Ayonb4/45qXQ/XQhbIENOeYzct/R3KGY9UzV5Pw9T1J7g8RuNKd33prHNJ3f7xUPY/OYcum7nXQdT4h0HrJEh5yasTVk2uRcwZWBfz4/DMuSU41bUt+FevsfiVnoZsu7HFBCXkNT/5Pn+OWWpN30mewCmhg/mYOta/94Hmxg8QsPT+SYkg882/s8RmOXAHNhq5tcwLfxpJKsvGXKm88K8tl/PinVKYzClwpKZcs5LKyFmyvQiDeuCWnHGtH8/TK53wrSrV9MwpR1MhaICmxR/H7MK+bp/fxxz9Fj4nziCDI0tNph4EbMA2RPrNGdKni9IntOznuI21O+dWRUmz6tnkut+nv8XYas3uXsTV8T3Ifr1h1jDfYY3mBvWyLdpsUlE4fH2aUzZch6mYc01besubhd4uuc6hVu9FLefeXk/BzP/y3aKinWid9AYsD1MY4XpwRp1ZUYvg3Mm985xOa/iipcOxC1rv10PeXYBpvHP8gXg73Z36sxRXney2gFMgVS0mft6OeyH1eNngI39WVX9ryqPm9WI2xBMybVJ6f4BmNJk0xqyqvIutyz1wQbuh/v1S9i+yxOwFddK5Za/NzvwcnI9DFOWnYRZSxUDyar07i5eE7zMZ1nN+btrk6yKYmbIV9IwW891Vld1ElL2vtSMfFsuU06PJyF1oEwegg3465TvqjqX6xNkI7r6AboQOKqFuHVXJk/F+oIspSLd92/fwSaFdfq38TQcTg/0uvYpvnhSo5507HSu5L0laWx3fBW4oqaMpvlGY8U9t751VyYP8rTPdlKXvLspjVN1HsfGYhNwZQ7eH2TI2R1b9BngZaCLkrJGeJYl8VGDjZePxxQVWcqbjLgtjylMcvvM3VzGcC9DD9KijzYvPz8mObWGxHoKW0yp7KMwBcJzWD8yAVvUu4sac5Qe8i31O5OrlNjV6+tBWF9yKJlWMhVxO9/j9gA1J/7YXOJB/34aZiExtpUw9abPZA
/A1PDBJvbLYhP1onPZCdOK9cHMv2qdKNDN/1kTM216Gjguub8TNfbvJu+8i5mN3U1jBfY4Mo7Swibmy+F7QZP7j2OdyyokVhCZYVocN9fEtJnPYpruG7yhy3WWchawVXJ9TNIALOD3ck8JeBLfUuDXX6wM+vW8OfIy4jZ9jTBdi2nG+2Ed0dN+/zJKHXoPMlah0TGvQ2KKhpkW1zqiEVN0neHxvCi5vw2+Wp4Tvw7FbRMayrItcSelXk9PpWFilpveh3m9WJ6GY7GR2MDrwBpxOxlb5ZwPWykrHEsdRWZnmxG3/fw6x0KlpzyruzpZeepMppxiIDs3XVdbDyfDemcSxW1jTPFTXnHdDDixhpyqvCu2wOSUpYW9Tqxdit9c2KAp67hArP1J+5I+mLntAzTMg3ssSxnxyvY0jm13W7VIB4/P7WRak9UokwfXSOuOxI8OnoRUUSbH15TVdp3DrLTWS8rQStgE57g6Yckok1tkyqjq34rxTp0j+dKtSVthiwE9buGrWSYPzJRRWOnsBtzs39fGfCa8ha9sZ5btTuZbR9rJ0rvfxiZxS2ALQ/+lnrPSvl5uxib3zvB6WxzLmOs8by4Pw24kloTeJnxAfb8XTeNG/kS5H4lyFBtDXkBy/GgNWV/HtttuijmcfiiND7Zdt7JPodGW7Oj53s/D9E9M4b1WTpgq8q04NS833zbFxgOzejxPx6wei+O2LyFv62N3cfsfj9vamXHrg41zF0/uHY9tXyusTLOtMXvTZ7IHYEr/eOEYSldHHX2wSfX1mIau0tFHxf8YSkNLOJsX4n9j+7hmxlYBt8qUdThuduYN0I50Ndv8Icm2hRphLDq5c7DO9r7cMCUyArZKvwumTUxNii8kwzzRf7sL5h+hOD7vSW8ADvJKm9UwYgqiQ7BJSaEo2pCGh+jtgXM7FLccs61h/ncBbFXydW+YihWmQ0n25tZI9xno6h14D+COmjJmwvbp/Sp9F/OuXemEaRLGbRRdTTj3IzmOMlPGvNhq3Q/oOnnbkwyHbl5nB2H7sO/ErJcuxhUk2Gr35V923NrNM//tQpjV0KzYIOR9khVSMk6dwR0eYpZX/cr10+/9gBpmxh2K2zIermHYoOP72P7Zov3ckRp7jCdBuTzJ68mL2NFefbHBbY8Oa/3dwf53cU+n20mU3lgbVetor07Fq4nMeTHF9xlkmJN2okxO4nxL9/GvQY2TkDpVJidFnWvyP5bAFj+6+ByZDGWypf7N69M8dF0VLRzefQ1rw18lz0JxkpRJzOpqUcwi62RskeeiunLazLeOtZM0Jm3FeHcP3CEsNtG9BRuj5mwrKGQVjmuLvemLYWPCVpSUa2Km89uUysV5wC4ditvuGeEoZBR+E4q4rYMpN+oqzZeioUAejs0TfoSNTYdiFgZZR70mZeIFbAz9PLbId8JkzLcByfcFvb6dgi2q1urDOxS3YkthEbdR+DGYdePWmz6TPQBT8scL+CPYitArJEeXYJ3OE5gDph4d7FT8j/2wSfA/6HqqwMrYnuwLyDdxPRZ43L8XDVLq8Xo7krORe5CzPbZi+0C58cPOmv2c1gdsS3tD9hm+OoJNxn5EhWkpXQdqB2Ar/4+QTNYwE9XsY8ua/I9pPN3nwya+2ZYnbcZtqP/fVPkzd/J9IPBzKpQu/v+2xSYe2zLxasIgbCBZGS9scLUMyRYCGns7n/Ny8laVrA7GrT+mAV8Zm2CNbBK3lzPj1hebyMyfvHsY5ujqXmzf8jtUDLg9bvcDCyb3RiTfp8FMQr+UuHUqz/y9wZipbWoiua3n1cOYAq3y1BlsYH6Hp8NF2GBtHn/WB+v4b/6yymMSt7fp2qZvglkW3Yqtor6WKatTeTcIG6jvgq/cuNwHPUxnYwOdHo/0wpTdz9JY2RqOtZdPedn+Jnbc8WpfYn0bhCl7dil+T1cfM0tilnRVZ4R3pExOgjq3LF2dlBUT076YsrTyJKQOl8lO1bkBmNn8hjQ88acr+Vtik4AqZ5WdKpOd7N8mYP3S53SzYg9slyGnU+1kP2zMuSC2st3Xy9ULwAfJ73KUAJ3Kt46VyTQMJE6XsS0nt9Cw6Ju5Klz+u+K44man88zlYTyyKr0wpdKj2BaHQdhCwpXYSU/HY23WR1Rso+hw3Irxe7EVOB37LoZbmJFnpdJUIYb5iTkN+A/VpzMNwpRsqVXDNtgW6meTspLjrLDj+dasXnj5/wPVTtA7GbeBmCJioi2u2CLI97CtcJU+S3rjp9CWiBYIIXwX6xBuxwrveVjB2j/G+FQIYXNMS3tNi/JHYh3qxlhjtolf/wNbhfrfEMKAGOP/ZciaHltl3T7G+NMQwp6YJvq/2KDiMkzD/O8Y4ycVYXoV27cXsYr9P8CpMcYn/Te3ASfHGN/JjOe62AT9gxjjY37vcGxrwMPY4OLvMcZ9KuRcgGnHb6PhvGcE8EmM8T8hhFOxRn+rjDCtgjXMH2ADiqdijP/1Z0dhk8vrYowHf0lxuwizAtk7hDDY4zY/8KrHbSNsknFKhZzLMXP9zzBv4gtjSpZLY4z/DSEcjWlP9+5JThKmUZip11UxxkOTZ3tjJpgfxxif/ZLidoXHbTBmoTINVubviTF+HkI4HRtg7pARtwket29g5pon+v1R2ADiv8C7McaHK+RcDBBj3C+EMAxTMCyJKdz+HUJYAZsAT/gy4tapPPPfT8CcWU2HTVAujDF+7s+2wo5Z+3uM8Yc9yJgXU3LOE0IYgk36l8S8/F8WY/xtCGFF4J0Y41++5LiNxay4nsKOiyvq/yqYkuK/Mcb3MmR1Ou8+x9L2gBjjv/3ZEpgD0xhj/EmFnLOxAdEH2NamV0II02GTul39Z2/FGB/8MuLVJG6fYv3LdMDfkjK1dE9lyX8zgTbLZKfjVyqXl8UYj2zym01jjPdnxG0sbZbJSVTn5sUG+gfHGGMIIUQfXIYQ1ogxPl0hp1NlsiP9Wwhhcew4yXEhhJkxBcsvMI/z98YYf97T+yVZE+hAmQwhXIb54pkfy//fYeOdBbC+6MUQQr8Y438ywtSpfJtAh9pJf+dMj+N/sHHAqVhfeytWLp7JkeOyzsHqbn/gknL6hhC2wCbmd1XIuRkrR9fEGC/wewth22oGY1Y+T8cY7/0S43YcZu3yJ2zB7Fel50cCH8UYb6qQswCmbN8lxvhocj94eZgB22a9TIzxlz3IuQ2rGwti84KjMF8zJ2D+5V6qEbdO59vVMcYL/V6fpO6dhG2j3r1CTifjdjk2N5kds1x/qPR8ccwCrsdxZa+ljvZBny7apOkxDdXSpfs7YQ10lqOmiv8xAfewjXnv/hdWiB/FOslsj7ku4xysAxmBaek3xzrxq8k0bcTM1+4t3dsN69zOxSpLlmNAf3csdlLEuZiWfvXk2bSYSegsVHuIXxzTlM/o18P8U2jfZ8ZWinM8+S6NWTdcivlduBBb/SvMy9bEBr5Vq2Wditswmp/wcZv/nR9rfHvUavr/+lnpen1MI3qs59305J00sDg24QfT4N6BKdROItN8t8Nxmx34hX/v4/VlD8yCZ9vkf
+V40E7jNjNmTnoMNZ35eFremZSbm/xzH7Zqs6yHtUrT3pG4dSrPkrL9sn9fGdvetWAdGf7uEtgkbc7k3tzYqumrZNTXSRC3JZK4zeuyNqgjYxLk3WLAa8n185gi+yJsT2hWX+Dp9DLW/hxKCybknYxXD3G7De//KK0yT+oy2eF8a1Yuz8VWNteuEZ5OlsklO1Tnxpby7RFsLHEa5gG9Mu87XCY72b9dgPuSwlbr/4Et6FxFxvHHnS6TmEVCUR4HYfvCT8QUHbW2pXQw3zpWJl3GRli/uASN4/ne8TJRbIfJ9Xu0AWa5sRS2VeoQzFHndmSeNuNy1sPG8+M8LBfTwv72SRC37wPreprvgi04rk6m75xE1rWYQuFKGo6nU0uVnak4MQpYDW/j/Ppb2Pj5Qhom/ZM73y6k+fGjVb7POh235zEF1J6YQmlzzEF8Vp3r7Z/JHoAp9YPth97NK+Sg0rN9qeGIrof/sTwNxzFH0NXZym3AWjXlHegN0TW4AzC/f6g3lDkORYZjneqqpfszYZ1Sj5PsJvIexrdNYPu27sU6uDPpaiJaVfGPx7TjYPvIbvOG8nQavhSytjpgA9mD/PuMmCb6cGxwWxypV6k06VTckvxvdsLHqfhWmIw06uN5v1lybyBmJn4PfgZ9ZnjOStJoc+x88dWwjvI1Sp6sv4S4DfF3U6XNMMyfx/M0TGlz4tZsMLmpl+/3yDjbO5G1C+Z8aRS2D7E4XupbZDg/dRlDPW6rtRM3L7+dyrMJNJx/DXfZ71HTMZW/fzQ2mF2GrluwLiT/hJBOlsczsNV/PL/2xyZaq7YQtyLv2iqX2Ip9kd7rY97858NWPm8F1s0Mz1V4P4JZ1T3n4SvatTpe3dsuk/6bQ5L6Vo7bLTXidl4Hy2RH2pOKcvkqJafGPcg5vVNl0mUc14E6dzJu6o75KPojpqzYCTvNIPf0i06VyX50rn+bh4aJ+p7AmsmzCeSfMHBeUrZbLpNY3/gUXZ3UzYyNK16lXp90UofyrdNlch98fzwNn1wLePk4oKasR2k4QD3C0/xMbMHifPJPC3mahtPKubD2aPvkea5jwE7G7YUkbsd6mt/kdTdbFo0J7vRepu+lYttVN3I29HeHJ/em8TA9SMYRr8l7j03CfNsueV5sq6hqv9fvYNzuSuJ2KGbxtB82rnyQzOMme/NnsgdgSvxgg7t7MbOcc4FPSDw1Y5OFe9r8H1+cBe9/i2Ozir1T9wI7Z8pKz2/dAhuA/pKG5+o7yD/ztq83js9hjk0G0Tgm8kXciV5uHEl8NgC/xTSRi2KDlRvIH0yshlkgDMYm7WthmvjjsVWBbM0tZrXxKF0dOc3lDfZ15Dlh6kjckvxfBXMg09YJH57/P8YGNunxl4cAE2rIGZw0yl8ncSqJHc21W424rYF1HM+0GbfdML8lB5Ioj7B9ilmnVtDwLF9on1saTOIdHzZQexdznnde8nxz3J9Jzbgd0Ebcpk3SfMtW8iz5/UQr4tj2p2vIP41lJv87Ghs0TMDMr4uB/1vkO5sdTGMC0FbcupG/n9e97CMQS3n3ZDt5VypTS+NWL359UE7dxdrqNUr3pvN0r33etdfPtspk8k6Rd8u1Ejf/7QxN7tUqk03yrd32ZLok3zpaLlspk8CO/nc2r3Pntlrn/PdFe7IaXZUvx5DhGA6zPlutg2WyI/1bOTz+txh7PUvmMYF0oJ1M3iucSm9BMvHDlFY71pRV9N0t5Vsny2Ty7nyY0mSLNJzYwtpduCO7qvLo5adYiBiEOcEsTvdaBFtsmjNDzmDc0WKSXttiVqeb1ozb/J4uW7YZt6E0FFSDsHHFIti4fBOXleWzzdNhA/8+vV9fW5RLauzjxxRn+1Ga8GOT5UoFVZJv2/n1gDbybQiwq18XyptW820A1ka2G7dpaBx73R+zCClOiJsOm1dkKZd782eyB2BK+2Arje9jk4RbvSKvjO2xfBabdL5DciRIC/9ja8z8sKmJD6YRrPTk7b/dnGQl1CvbktjqxIuYo6scj8dr0/WYk7WxCeDDmJb0CpLzo2vEteik+1I6PxsbzNXRvF/on3vo6qTmadykvoasS4CbSToAv/8kFU5cJkXc/J3ZvOH+N6aUqHXCRyJnPmzA9QxmNrkQNpD8Zqtl1uUWg8unqaFYSuJ2bgfith42wLoaWzGZweOWdd47JXNYGoPJIm7PkjGY9LgUDpP6eX373MvnOMyhZ2XcMIVY/yRu57QSN7o5aquVPKO0upaEbxZsxeWmDBkHkEwYsNXf7bBViHsw50RXZoanqbl/Uv/qxG0Ouk4eiwFJP2ywfQf5RzIOS8KwntfdqzGFc52869HZE7aquHVOmLpJn/UwBfNJLchouUwmZaZ/OY5JuWw1biGRn1Ume4hfS+0JZrU30Qpm3TqHTa6HJNdpv5JdJrFVsT/gfRq2Yr4X1lbdWbPObZfGpUncniSvfRtaui4US7XKJF2VvvN5XXsGW43P7t8wa6lzMQuFZnm3N/BoZpiG0tWxaNFOzlqnTNLVQ/0+Xh6OxcaJfT1u22TKqjoCNivfmuRXS2WyiZzNsP3pt9P1CNMf0+KEi9L2FMxibSKneDXkfRMbB2RZ8iTvbYQtnNxF1xMi2onbvKXrV8g8jQFYspT2s2MT3f0z35+Gxvav5TBl0EOYZdmcWP/3PnmT7okW1ygpD3LyjYnbo7T+Zedb6b1xHreHMQVcrbg1iyelbWUet7Gtlsne8pnsAZjSPphmeX//fildvduu7YWvR2+vFfID5v232Hc9PabFLI7gmRnbnlC5V81l/RD3+o0Nimejq4njnFSYEbmc90hWjkpx3ssrax2z4oXLDX3xv/zvzmSu4NLwkzATNiB4G7NKWANz6PS9TDlDcKUJ1jHujilKrsG8qx9KsvewQla3WyJy49Ysjbw8rEKNEz4wD+VLlPJzBmxv2l1YJ3JUZrz2wAZrr2Om/HORaLKxQfdjGXJ2xRQHL2CrR7N5I70qNrjNjVsqZ38PzxKYRvphzJTs1My47YtZjvwGWKfJ86zBJGaG+jnWCS2c3J8PWwE4Ati7hpwrsG0gAzCrm20wL85ZcfP/dyPdmPfl5pn/9pCe/ic2IT+vQsZI7GST2f16OLYH9wtv49igO2ev83FeF9ak5IW/hbgdiCkzPiXZXlYKd9Z56nQ9necbXufm8O8PYAOvnLzbA5t8fALs1U3e9uiB2383hB4sorDz57+TIadLm+RlcrGkTGbFq5Te72F7wfcgGQzXiNuu/v73XOZsdD3GubJMlmS13Z5gbeN9wN8pKaXrlEsabcCTNE6cSQe7WWXSf/cGNhF9iq5bVUZiq2S5de4o4IEenh9EXv92ALBnB8rkYZSOW8O2la2A9W9XkN+/nQH8HjNp3gVri0Z7Gs2GKSgqJ39Y/b/J838iRQY27qosk14nLsSOv10bW5VeweP8EDZBmpAZtx0w5dgbJCeGtZBvK+CT0eReuoAzkhrKSSaeBA72+vwp1obejR+vmCFrFa9b62FbX8ue/ceTccwrNjbaBfNLsDolRQy2HXOi9riJ
nAUxy6Qx2PhmqNefVuK2JWbd0t3iQG7cpsPGOtMWdSV5tjLmpP30cto1kXMetrC6QKkMPYuNmR8DLsiM28X0cEwptvCVE7cJ+LaCbp7n5tsFlBRrmDLvGWxuUSduJ3jepOPk1E9FVtymhM9kD8CU9MG01q/QWLVa0ytUZadX438shTs99Ib5Uazj+BA41u9Xmkf5706hcRzNIK8I92GD00NqhGk85uEWbxAP9AbgZGqcTZvIG47t2zsP25Na1tbNAvyUUqfVRM4Ib7ju8E/AGu3VsA73cWyAUOkECfOV8DCmgPnQw9gH26KwkzeSx1Jh6eBhut7z7WX8LN3k+cyZcRuFrdCchW0pmMiMjbxVqVHYdosbsMngRGWH/H2AI1zWOGxLyV1Yh7iNPx+Mrb4ulCHnPWzlaAXMs/fLwCZ1wlSSsyKmiHuRpMMl38HUCGylYEFscnQYNihZLSmzJ1MxmMTq7Pcx5cGxwGF160ci52VMKTaBJmdck3dM0XAvz8VkfQlMMba5/4+hOXmWhOmHNLYqzIlNtCqP4CvJuZRGGze3l6E7MSXD+Nzy6O9/C1MgTvB4zYs5eVvY87RO3H6EtQNLYnV4Ly8Ltcx3XdZbnj7rYtZOm2ID0xFFXcmU8w42YV8D2yubmgMPwPqgHttgrA14E3OoWz76sI9/BlCxlYuJ26SRqRz/m7UPFFOQfYopNhf0tD4Tm7At5r9ZKyNu5bbkfayP3iQnHD3Iark9cTnvellax+O1vde5GXPrHD20JVhfl+2fCduDfIx/P9jlTqQ0zZAzzNO42JK5hufZnthWnCFYm1DVT5aVikOwvnZM8pu+GWWyaCcLZ8yLYJOlTWhMmLKd6HlZvBab3Dzg33+NbwvIKd90rf/rYGOlLbG2YCKlZ4acJTDrrVMwfxCLJb8ZQYZpupe7t7E94ZtiE6NlS/n6zYx863Y8QYaD4SbyFsLGbhNNArE2Ykes/cvp67pzpr1ckrevU+1MeymsrT0Tsyz8LrY9KT02cDoqlG8u5xVMoXUztiV3d2ycNAxTXi6aGbclvb6c43Fc2uWM8ufzeVmp9GGGWeB8QNetLqlSYUUqtmJ5uN9IZG2QPJseq4fDyRujLoq1l4VzylGYf4lF/XruzHxbxMNUWJVO6+mcbn3KybciboOS8Mzu5bEftn0lN25DvQz8Ghs7FT7VRiRh7rK9ekr+TPYATEkfL0zFwLxYSV/YG4qd0vtt/I/BuAMhzHTsBL+/ENZxZlkBeFhPwrTke3ihPd0r1NKYyX3lHj6so3+HhsPDG7AO8tvYIOfMmvEr0u0ObKB9isdzFUy7ORxbCcjZp345NpGYCxswjcMmAMOS3+Q6cbkcO+oSrDPaxcO2cjnsFXKupHF86JHYkT7Fs4HeMG2aIWc8tuK2N9YxHotNHorGcqL9wt3IORJTrKxLw7nNTjTM8eclX6GwAXbcYXpvC28or8EGgJWeeLHBZ+pfYjGs0y4c8PQlz79EMzlPuJwzvA5UOhr1dw/HjgDC68ffvWz9FKuP/chwwON5dbp/XwU7cukiMvxulORcDJzi3zfHVs2OoeHUMdez8OzAXf59YWygdKqn970kvjAyZF0CvJ3IfQZbPf/A02hgVbiwwcZZnk47YQOFfbCOd2GsvRqWGZ5i1f8uGqbAl2InzhRKrtyyfTa+YohZgfwLGzBfhq1KZ5004O9PoPnpPI9h7XDuiQxnAeck11t6Ohd78nO98Z+HTSbuxFbF1vR8KJQAuVs4xjNxm7QWNdsk/+3cnq6ptdzimNLubvIVE83agMexNuA0akxyupFVuz3BJrS3+/cFgX9iE5InsbHCkJxySUZbUlXfinKCbc9MTecPxvq8Wh7rsfbiFv8+H1Z/j8D6vbvI95/STKl4G6b4OiUnffzdi4Hf+fdim9wEGibKWW1JSeaxNEy5H8aOoDyV0uJAD+9PYOL6fzxW97PD5Hl9hn9fGTue+3RsPHYf9bzen1+UJb8+BNu3XbcNaDae2JHGeGIe6u29fx5rB2708rh43fwqpXt3zrTnKO5nyLmChq+RGTFlwEVeLuvE7XrcgaOX8aew/rLS8rKJrBtpzDGOwNryZ7F2qVB2zZ0hZ1Gvs4dgVi8TbZXBxl9Vp2o9RmPb0/7Yin4zHzY57dMBuNUXNsZ8GGsn03zIOZ3tUVwRgik6r8IUp6fTxLq6Bzn305jvrOFp/SQ2xt2qTtz8d6t7HJfHxl1PYMrqof48y1n8lPCZ7AGYUj/YYLbYD7ONF9za3qS7kb0FjSPBtqXR6F9Lk5XKHuQM9wp6LvCD0rO7Ke3rbxZH/7uu//7ZVA42mH+MjBXAJrK39Qq/LrYa/DKmYc3yK4Bp9tKjj36GeXK9CjMzztpT6O/Oi020+iWyLsAUMq+T6aQIM2t7PbkehR3PuaZfZ3tx9fjNhQ2I1/SwXICtnHwDm8TlOIecERv49cGUUrt5ufoONgn7OZkDE8zK5TpM2ZI6+pwG63SzjsPCVm6uw45NnBOb8Ozlci7PiVdS/q5pV47LWhTrwHbDFCSF46OATbwrOyT/v9vR1fxzTmwwUMdZ6RCvE6nZ9jIe12Vq1rM+mCJgf2yytkvy7BZgw0w5AbPYKHyLfIj7QMAGIPeSr+wcjK2SXcbER9A+RY3j9Pyd7Wl4ib8FWw04Hduzmtvpz01jYrwvyZYUrE3J9i+CrZQXslo6ncfzbSOSSYzXv8f9+3zYykfVCu5AbOVxNpd5KKYIOtvrz6LYACpnlbMjbVJSni71urFi6dlN5DvjnJPOtQEdaU88ra/CBvsvk3hex/r1TTNkFG1JaiZbuy1Jy5P/Lfq4EZjl41OU9mFnyDrHP2eRrGRi7efuGe9P5++eT3Ol4o3kT7oLT/kvY5aXuyfPbidZha0Rv/m9XA/GJu9reP7n+ITog00citXWVut/P2wcWJzydSru+BvzOXIvmXuusTZgV7q2JaPw7XvYuOVFMpRLdHY8MYPXt6WxtuUYL5PFws78wDdq5Ns22Hi0mTPta8lbUS58QFxNY5vxdR6363EFT4ac/lj7cSJu1eQyD/YwHlcjXgNwhTemNPsUsxAbS8PpeO7C2SM0JunbYH352KTs5igA5sAVpsn1ffgJXS3Ut8W8PM/g9WN1bMFiW49bjjXfKKz/uQtrE57GxhjLYIqg68hbpPoapjC7Aav3r2BzqLmxrco35qZ1kqazYEqEVf3e97AFoqupsVAxJXwmewCmlg+2l7fWMY6l92fEJrYr+fV+NBw9bogNct8gbwV4LLaVoBhEzENXM7kFMCcgVYPRYXSdHO1G0sBjA5wfUfPsW383eCM5Btu7+TtsYHABGSsBWCe0RvI9beC2BsbXCMtQ3JTdG47zk2eLeaOWs7d0LhrehYu0/xZuquoNVZY3Zm+I0pWoYVgHcALwMa5BbSHdp8EmPLtj206y9pYm7xd+Fw7zvCtM7p4lOU6ph/cX8r/HYyalN2EDv2KF40kSB1s9yOmHDZROcDk3tyInkTcQW305Cpvopt6YH8RXBzLkfOGkyj8Bm/C+TQ3ncjSUeQO
SuB6DHTWUNeHGnRphK783YFZB59Hw4nwv9U52CF5XdwKuL5Xzh6lQ4mF7kRenMYCZk0QJhQ0kXmyhTI/CJjPzYavJK2ErU7XPRPc4FnEq8uBOMj3OU1phwxVvicx7yTidx/O7H41Bbd8kLOtgA/FDM8PUj65tyexYO/sgtkqVu788kLT1WJv0Ddpok7xMn4v1b4WTsMfI8zEyCpsAFG1Ju23ANJgVxoM02qWhdWV5mLbGLAO/gylxCvP7e8iYdJfk9fc8rN2WYGOBDbFV5OmZeE/5JTlp7b8tTpoagSnsbsP6gjF+/y4y9icX9QLrzy6ldCIWrSkV18EG6P2SensfGe02NnFdCpuAFJ7Xj8J8V1xeJxwluX1omDbXrf/DSnIWKMl5kHqLJtMl+dfH0+lxrD2+isy2pInctsYTLqM4UWk45kPpPGwB639o4uuhQtaFNHem/RT5zrRn8XL5GNZvPp2k4fXkK0yW8LS9zD/P+f3ZPJxZ1pP+zhis7b8DeKL07HnytvXNDFydXA/BFHuP45bXdcq2/03HO1djStRsK45E3ulYG/4YiSUoNvkeV0POqcCfgUdK95+gic+2HuTsi82Rbi/df54WHGhiyomDsLnYz7B5xaW0sO2sN38mewCm9A9tbnFI5DzsDeEt+Co9tiK1P6blPJrMATK2x2qb5DrdHzUdNlCayOlYEzn3YoO9lZrFF9MCZg0gupG/Fjbwv8E7kenImJQ2kTMTiaYP097mOnQMJPtiKXkcxxwhfT9DTuGtfHDp/go0HEQ+nSFnIxLFlDfUaf4dAPwiQ87Cnr770tinmubdHsDPM9No8UTWbJ7eF2FmtJdiA8nKU0ewQfZruGketsIxE43B9hbAjzLkrE8y4MTMikfQmEhkyUnCsAqwMY1OckmP0xZe7yqdelJtIrgriTl1D7+bH5sEbExzp6WHkOfMawts8lKkydKYefAjmKXCNcBTmWlUTu+BdO3016NCEeD17ElP14doOD8tBv/D/HmWxUQis3h/P+BvlJyzZchYBttSchZdz3gvysJmVXFL3tkOM49eP41b8jzrdB7P+6YWEVj78IfMMjkaG7gsg/Ulfenalownsw1olvbJ96w2qZTeZ3vdmw5biToNG6w9Q2kw2EP5Pi7JpwW8DA1Nnue2AeU2dwzWLk1XRxamUF4Ka+uLNm12bHV7P6wvzcm35TxNzqWJCTj5bUnAVu4vxxQkhW+K1OphHSr8+SRpcCyNlff5Pd/vxFZ/7yTPYe2ydHXiNhfJAJ1MpSLN+7c0XhtlyinS6HZMyfYDbEVzALbFqNhelLPVZW1sYvQUTXx/kF//t8SUZOWTT4qyvm6OnCQd1u5Gzo7YQs7zLaZ3S+OJJN2b+XMq0vtGMsZL/tt5vc4NwPbN745N4K/xOGY508a2lRRbIxb2/NyMhr+QvXPChI0DizHO0tjK9s40lEL75NSVJuk1FFMKnoEpYhf2+D1bR1a5PGNjuNub5UeT9w7Ct2Im9wqlQrEtO8uaD+ubCiXXNJjT0B9hR9Jv5mWtMm6YYiR1Oj6Krs6wd6LGEd3Je1+j62kcO5Hv4HkgPn7zcjkNNsb4M3Bkkad1w9TbP5M9APpEMKXBA94QFSukx2AdWy2TGKzTetK/98UGAZdgZq3LYBPUHk33aJzl+oo3NhNczgjM1HUwZuY0oWbYtvMGaS+vZH2xjuOn1NsPuAWmNNiZxHKAxuTie2RMTDye52PmflfT/OiaH1TJcjl30WTLC9bBvQn8hbzjah7Hjk68gIZiKTV934MKk1kPzwsepwtwZU0pjbYhz09FwLaiXIgNcD92ef2xQfxaXuZ6NJt1OW9gGuibKTk3wzrKYzPT+sd0Pau+D42ByHQ5ckrpdDm2IrJa8mxrbOJ7EolCrQc512LOQMvHThad7TRU7DF3Oc9g9fUkbBvOcWndwFYZeuz4Xc7b2Gr91XQdaK/mebY2GduLuknvATRMp4dj+6er8m08cIV/P87T/ChsdXtZz8Om3qubhOdcmqw2YZOwYrUrZwJQlMl9MNPkzzHlZuHgbUbPh5x6G7D28krMvLLsaLZYlepRKZyEafnk3gi6WgZUOtSjqwLnARqTyXQScBoZ297K6U2jDSkmJnuTZ8afpveRnt6X4maymNJjMSp8lbicX9I4CWkgNjFNnXEdV1UmE3lFm3sxjRXq4oi/weS3S09hq+JXAgcmz77hsg+hYsuSy3nf/+d3MAd4XUxjsfpX6a8CUxxe6d/3x5SJF2GrnJVtfw/pPQCzVOqPjRNGY2b+OQ7h3vQ4nVbOZ2wAX6lUZOL+bQJdFwGGeZnNOQnrcBq+HIZgyukbPS+X9fs5/i6KfnJLbMJ3IGYxVet0riZp3d/DVCioZsX6z7ple6LxhMf3fSosa5ukd8vjCf/tjF4GH8XGXluT+N7A+oKfkuEMF5s8vuifd7G2cgA2kf8mtuiV40x7JDZubLpS7+n+LtXOtEfS8G/wfUpWwNjC02/JWHXH2rGJnDZiyrxzvK48ScZWQ0+XxbBxyIylZ0OxfqLKEeMI7ASsX2Hzk4GUnMNiSpkFMsIzI2Y1MCK51xezWtwda6sOq5JFV2fqv6E0X/L8+CAj34b6/206tsIUzD+vkpOUyXu9fD9IQ/mzNHB8Wq9y6suU9JnsAdAnglkgbO3fJ2CdwJ7YYPsK6jmCWZTGiQxHesO9KtbBPUA9D+q7YZrLrT2MD2Cd5rAW4niCV7CjsMnE1/3+NDS8uebscTvVG6LTPG3eoOt+531w53oZssZjE4g5MCuCMR7XYlKyERlOJ7FViZv9+wCs09gweX4ocENmmDbyNL8YM28/xBvMLIdQSRpd6t/nwybG27VYNvclOSLMG95bvXFdt2b+n+ff73YZxWpAMSjJ8XZ8InCHfx+MTfiuwsxv1yvuZ4bpRBqT3L29fJ+CDZyWzG3wPQz/8nw6BRsA9st9P5FzfFJ3Z/Ty/RLWuVZ20iU5hTOv+7CJSdZpFzXTu1iJr/JW/zVMMTdnEqabsA78aOCsGuE5DZuIvuvxrOXssiTr23TdKrUjtmf6t8Dmfm+i01V6KN8X+fdrsMHkksnzpity3ZSlR/z7tNgA/mZPv138fqWvHnpW4BRezyuV1d2lNzXMdSvS+yfYQHDzGnLWAu737zNhg+E7MeuQg/1+dnmna5v7Sxpt7iK5sryOXeLfV8UssWqdU+/vbk/i+BZTtF5IyT9AhpyATZaLI67v9TKwOqaIv4L8fdfN0vseD9O3csPjfwsneediyoXtPd1nxvrOHKViZf9GhgPdJK/OLt3rh40jTsmR4e8cANzp30djbcjd2OTrOL+fU/+7K9ufJmW7zligu/HE2OL/dSK9a5bxq/BxFbYq/QCmkE9XmSsnbv67S2mcXnI6Nua9xstVHeuSL44WxZS/a2Bj8fmT3yyRIedyGn3vJZh139ElOT0uUCS/uxtbrd8BG5umRw3Oh02WK7cqYBPcwiHwjdgi2iYkzoGx8U6PWwI8jQ7HrK5uoavSu+6pM5cn9WKU//8DqeHYN5FT+NyYgPn3Op
aXJ+AH3EDPbFSCl9N9WM2RmC5rZmvgF2mjgV2a2T+kjxUVhS2Y2Du918W6sr0fw2xUKI/CL1FZx5gf+DrxjZjOllP6XUnoDT27MCnzbzGbNiDM33n10X7z3x77xc6CZPW5mR+NdLk/LiPUxM9vIzJZOKd0PPGRmW+GV/yPwXh5vR1lTH3GWwtfxu2Y2JD7/LH5BN5+Z7RXbSF9lGYFXqM/A94dj8IuHvc3sMTM7Lne+It7mZrYOQErpzpi3z+EnumLe3sqYt02BP6SUno5Y/0spTcJPIlnzFnH2wffP75uZRazz8d4Pb+AnpD+mlO7OiWVmq0aMZ4GLzGwHfL0dlVJaEPhFSumfGXGWw5f3s/h29d3SR1bCLwr+k1Gmr+AXEj8qpqWUzsO3gTdj/v7Sav7M7GB8JPBhpXn7PL7ejo55e7WveTOzOfHu1k/HPnY23itlPTN70swOwJNTh/cWoxLvADyB8HRp3s7Hx5j4VY15OwCvwB6Pt1L/hJ7H/hnxxEmr8hyIV4qOojFGzAOlj6yE9y7rc72Z2fr4bXKzmtnMMV/H4hcShj9Ob7qU0qMZZVofr7Q/jN8iNxN+DiGl9C7+6MKl4nVf+9sGZvbZmJ9/4xcTd5U+Mif+iMVeY0SchfDj/DNmtljpvHEPME/8j/dSSo9lzNtC+Lq5GUjEiP6lj+Sut4XNbAU8UVI8YWJhfHsAv+1kxVZxIpbF+fpC4A0zK+pzjwOzxDnwKLwL978zYiX82HtQlOmTeM/Ef6WU7sLX6zI5ZcMTwQYcY2bn4S2Kd8dxeBDe6tinKM/1+KCSxXH3ReA/ZraSmR2BN1K8m1km8B4O/8K3w9njXPU63hV+jswYr+Hrfxt8gO098ST3Y3gvoT7rFbGs38OTgD/vY95GtJq34jwSdgL+gyc3q/M2V+a8/RT4t5l90swGx/d+iCeldjezWfuqf5W2ydNSSj+N+tyT+PG8OA/fTxwHcsQxbA/8wnI+fP2RUvoRnlCYPzPO63h9eUNgVTObLeqYt+GJgFX6+n5p3s5PKf3MzIZV5u32mLeP1Zy3ffDlPB9+6xMppR/iF9HzZcZ5G090jAM+bWZLm9mwlNIj+DJqeUyJ/e0S4GdxLLkVr5sQ2/Y9+DhhOeX5L3BSSuk1MxsSk6/Ge5XsZmazZdbjB6WUfoNfQyzQy7ytkFmm82KbHEQkyCrz1vL4HZ//H3BMSukXnczbNKm/Mxof5R+8srM4bXZPbBKv1uAf9LwvbmW8K+mLeOZvKH6C/Skt7t+iZ6+G4j7ikZXPPE+L0biptFrQc1Tw9fELnS1o3Zo8N36geRRv/V8Tz2Y/hycDzsQrPMtlLKO58Qra9/AWs+0r78+On3CXzYzzCF45/kJ5HeAnoZvI6wZaHtzzKBpdNvfCu+JvSl7WtitPHYk438cr2z8inixC4969YtChXm9bKdYp3mpzBF5JuxNPlFyEX8ytT/7TOEbgF7GT8B4qS+G387wIPFlj3kbg3cd/gF+ULhvTi3sKP91q3uJzc8a+tEBp2mB63qr0buY2OSd+ITquNG0BPFtfZ70VZSpn2ofjlfjn8dbKrCfF0HjqzLhi3eO9Fcojqv+t1fxFnNdpPEllBvy+1j8BT9WYt64NWhtlejWW7+3AfDG9PDp8y3UXcX6BH3tuxJOIM5a2pVliuS+bWZ5X8IrMnLEtXR7r89Cc9Ybvtz+M/eNufOyO6ek5mvo7rcpTiXUefhwYh997W9wyNzN+/G01b0Wc8/Fj9+J4QvgxvFfALjGPreKMie/cgCcgr8OPLfPg3cNfwhNBS2XMWxHrRvwe3ovx1vaF4/3ZMtdb0dJ2E54I/jZ+vB5TivNL8seq6MpAypVYHytNK8edJ9ZLq4E0Z8RvTVskXm+IJ07Gx+t58e0/6/aCXv7HVjQGqqt9qyA+psdE/Lj9nViXPyHziUERYzh+PlqxNK8L4ceIVoPEztBkvRXnv1rzhu+vI4nH3OEJn7bnDU8CHY7vf7fiSaRi7JNLaPH4Snw/n4uePbnKt/aMiX0lZ3yYbg3wPRc9xwA4CD/efR2vM32JvMG052LKwVjbnbdymQbjt/legvd4O7bGvM1HozfvEDx5fwE+bkmdwcvnp9JCH9uClf7P8+QNFN4jVmUZDcXPWQ9Vl2WTOIvj9dsZY962iXk7uea8LR77wQxdmLfF8frpFLel1pm3afWnWKDSj0qtAx/0/z0Tv8g6MqX0u5j2Zfwg+0P8xPdaSmn/jDjD8VsIJse0wSl6O5jZqfgtB3u1EWco8L+U0nvR8v5WSumiFnEuBX6eUjrBzD6F9wAZn1L6S/Ra+CfeEvyLvuI0ibUJXgn8VPJMp+EHtZlSSoe1G6f0mYVTSq+2iDMvXjleP6X0DzP7CZ51fxm/cPpbSunoVvMVsS4GXk8pHWdmh+MXpX/DL6BfSindZWbDU0r/qhlnfvyA/Qu8VeoFMxufvFW/VZnG4yfDQ/H1dAleqdglpXRv5nxZSimZ2X54peT3eHfEB/GWsm1TSo+3mrde4myOt0osB+yeUnrHzFZMKf2gRZkuBn6aUjrZzBbHT47L4RXHO/GWpdVTShdmzF811rZ4Rf0V4McppXtrrLcizlJ4192F8HvMl8Yff/V88paAVmXaHb/dZSfzHk2H471xRuKJwEfwW0YuqBnnCHzMi7mAV1JKh2fO21j8uLa7mZ2O3wv8EL6Nrwscn1L6R6v5iliXAM+llM6KY9RrKaVvx3uD8ETcqimli2vEOTvifCveG4wfd+dOKX2tZpxfpJROjff2wo+hk1KLXgVmdg7wRkrpG9FzYl18/38H7xr8J3z8mVP6itMk1lfwXnPv4ImWe/GE6VwppT1rxlkLX//v4C2/PwQeSt7S2VecsyLOSWa2DF4BfQP4dUrp67EM/5dS2i1j3sqxlsOPT6/jCcsT8OTV2IxzQDnO8pU4J+KJ78VSSme2KlPEux74L94TZF48yXx3vLccngT9SrGtZsYqLuIOTyndF+9Nhyfh30jeeyUnzkg8cXN4cfyIFt1vAn9NKR3RIs5W+MB3bydvzS+mG35+eQlvJTwpY96miGXeG25p/MJ+HH6rwfOZcX6XKi2QcRz4JvCPjHm7GF9ng/Ht+RT8+J/amLeLaVxw/SD2m1H4LZp15m16/DbDuVNKPzGzj+O9g95JKb1q3kPnNvwWjcl9xLkGP4bMg4/tdHvpvVnwuthrrbajUqx38VvWTo/z2vT4sWU0fk54ptX5yczux4/7B6SU7ohpi+CNU8PxxO7jKaWbM+N8OVV6osX2dBpeD8qZt/vxdXNAcSwz71X3CTxJNAa/zavVvN2Hj53xndK0GfFlNCZ+Hk0pPVQnTvmaJNbbiXjd+5iMeZuiTJX3R+DHuWdbxHkYuD6ldG5p2lC8gWo0vo19L2PeesQxsxEppb+2OW/VWDOnUi8iMxuJN6L0OW/TrL6yDfqZdn/wHglv4y1KP6IyngBekRtJi3sdm8Q5uPL+x/AWzz7vBcyIUyS/Wt2/Nw+eVR9bmnYWce803gq0QV8xWsQ6kxhQE28Z3IjW
mdZWccbllik+39vgnqOIwSwzYnTlqSN9xPl0xKk1xkDE2IXGCL6v4dn622jxzN8mccbHcl8ltsOn8GTJZzqM8wx+kZM7QvVc+MXZt+L1rXhiY/tYRqfWKEtvsXaKWKd1GGdHvGKcPb5IfL/ZU2fG4hep55M50GQvcYokwIVkPL0GujdoLZ5geYbG/ffr4sm7Y6r/s0txWh3feotzQs31NQRv5TshXr+Et9ytjrfYnU3+KOWtYp2GXzy1mrfe4qyJX7QflVMmGuMKlMfLOBW/FfBq4vGx5I0r0CrW3pnrrVWcYkyH3GW+JV0YSLmXWBPwJMcdeHJhGBmDqTWJs1PEuYtoaSVj4Gngi/jj2K7AE7gjm3wm99hbjTWKFvtqZpwpeqRlbk/b4rfyzIdfPF5HpScR0XuxjVjXk9HDrZdYF0dZbsZ7b5VblQfh55bjWsSoDsp9VmwDpxCDn9aYt24N8D0Yrz/8HO+d+GAsr8/h9bfcsYGqcb4b87QrngSas8a8VWMV6/ALZDxhorKMygPOfgE/xpxMRmt7H3E+j/dyORuvuw4m46ljLWKdRY3BZvHecuU4x8Qyv5mMXmV9xJmI15Hvo9HbdO0uxGprv5vWfvq9APrppxXvF/CHxt9r4hdsTxED3MQBr+VjB3uJ8yRRAYkDbs6AMK3ifDWnPPHZpSk9mxzvlnhV/H0b3rKcu5z6inUHMeBXh3FurVOmPv7HutQYkJPuPXWktzgjIk7LBEcl3uJ4BeIHxAVyblmaxConJ17H73u+lfrJiWqSY3/8/uesOHil43r8Pv7HS9NH4ffvjatRlr5i3U3mYzX7iDMLfqLMjdPqAv575D0GtVWch6nx9Jr4TkeD1uIXucXFUJHUXAo/juwwwOJMqLlsFsdbSO8BnihNH4zfnpX9KMQWsZ6k8qSdNuIMwROXq2XGKW4huxxPRDwX08fhvZ7e7/LaYayL8QuunAEi+4pzEZmPZY3vbEkXBlLuZqwuxjkRT5DsgO/zZ+HJ3KHxfvagtX3EKrqbZyWXM8qU+2SWW/HehcXrI4FzS6+ztu/MWOtlxqkmJm6gcvsOeY/2621Q7m/iSaU623dXBviO7w/Bb3eaGb+V8g08OZT9WO1uxulWrFhGl+L1rINjuewVy+g2SufONuN8M+LUGQS9VayZMuPMiycQFsRve70F721xFN7wuHSHcY6IOC0H4qwRK3swzmn1p98LoJ9+XPk9xygYimeCH8RbmV/tUpzXuhDn5Zrlqd6zNQK/H/c4ajxOr5uxulmmXuLPgPfs2KgLsWolJqZGHPxe0ltpPB+4dqtSfK8ryYkuxvkEPZ+j3sky6kqsLpep46fOdDnOILzXw+F4YuPrZDxnvpdYRmOMmG3wBGzW/e4DPM6ceC+Cb+A9V9ag9FSF/ojVjTj4LUX74Mnx4j7wzSk9DeODjtXFOHPg54/9K9NnxCv1K3UxVu6jPlvF6XPbjO15UOzvxWNmZ8VvK3kEvzA5kYzHxtaIdd8HGGcoPoDf0vRMBj4bf+9F5VHJHcTaE7gkc73dSt+JiT6TwaXyrBWvhxNPMInX0+Fj17TcJjNjfafVtlSKVySOjge+EX8/hCc5fwd8vgtxfk+9x4V2VKZYRkPwcReOxXsSvEn03IhldhUtGvJqxMl9ikLHZarE3AO/WD+cUnIMOIkavUz7iHMyNR4X2s0yTas//V4A/QysHzxj+i4dXpgOwDin4hngtbqwjLoSq4txuja4J11KTHQjDtEKQYeP4KF7yYmuxCnFG4Z3eexGEqgrsTqNQ5cu4LsVpxqz0+VciXckma2AAz0OfivYyXhL4AO0GHztg4jVzTJFvBH4WBq1eidNzVh149ClgZS7GauLccq9GoZV4i6CjxX0H/IGCe1KrKkVp/Le2fjAlU+TNwBuV2KRn+S4tEZ52h6Uu5uxqBzr8aTWiXgLdzFvq9GihbpbcbpcpvIymg+/HWz3ymdyllFX4nS5TOX9ayH8NpxX8VuNFsEHDW85oHq34nQ71rT+0+8F0M/A+sFbBm+eBuMsjg/E1o1l1JVY3SxTxOv0ArcriYluxenmD91LTnQrjuEXzHt3Y713I1Y3yxTxunIB36043frpdD8bwHFG4LfBjBsosbpcpsF0qRWpW7HqxsHH3rmA0j3S+PPZf4L3Drid/HFUuhJrKsQZXZo2jMaF5YnANTXnraNYXY5zIaUnANC4XWJnvGEhaxyUbsWie4mJojyje4l9KnBOzXnrKFbEOb8SZwJ+e9+ncsrSzThToUwXVva38q2B36yxjDqOMxXKdH5l294av3XiDPwifuIHFafbsab1n34vgH4G1g+VR9lNK3EiVtcuTLoVq5tl6uK8DagLHf3oRz/6+aj/0KWBlLsZayrGqQ7KPAs+0G/OoKxdifUBxlkeT770OXD1VIjVcWIiozxZg3J3M1aTOIeU3qszMGBX4kzlMh1aeX8xfIyWusuorThTuUyHVd5fCr8e6LPO3K043Y71UfgZhEhJSul/KR6pMi3FiVjvtf7UBxurm2XqlhRHyoESR0REAH/6zh54D4BVzOwpM/tsvLc8fnH4vw841tSI8wkze9LMPh3v7QD8LGU+6rWLsaZmnC3jvY8BX0ylx89N7VhmtjI+Ls3/gPvN7GCAlNJ/4iPP448N/kab5SmW0crAiR3OWzuxynFWL5ZRSulNMzvQzJZoozydxJlaZVqtsow+ifcGqruMOokztcq0ahxLPhPvbYzfDpJTZ+5WnG7HmqYV90yJiIiIyABlZkNSSv+Nv4fij4rbAX+825CU0sIfdKwPIM4YvEv+Ql2Yt1qxPoA48+CD9C3QhXnLjhUJhXVSSiea2Zr4I4NHAyenlG41swnAyymlZzssz7CU0oJdmrfsWE3ibBdx5oo47W6TbcWZymUqb5PTdbCM2orzAZZpaJeWUXacbsea1imhICIiIvIhZGYzA7/BRyy/dyDEGmhxBmKZ+nveupnk6EZ5pnasgRZnIJZJ8/bBx5qWDOnvAoiIiIhIWz6FP0qxGxXbbsUaaHEGYpn6dd6KZEL8/R/gcjO7mbhQ+qDL8wHEGmhxBmKZNG8ffKxphnooiIiIiHwImdlgYMbUhbGGuhVroMUZiGUaoPO2HbB1SmmrgVCebsYaaHEGYpk0bx98rGmJEgoiIiIiIh9hulASkXYpoSAiIiIiIiIitemxkSIiIiIiIiJSmxIKIiIiIiIiIlKbEgoiIiIiIiIiUpsSCiIiIiIiIiJSmxIKIiIiIiIiIlLb/wOyB+8Un/zBkgAAAABJRU5ErkJggg==\n", - "text/plain": [ - "
" - ] - }, - "metadata": { - "needs_background": "light" - }, - "output_type": "display_data" - }, - { - "data": { - "image/png": "iVBORw0KGgoAAAANSUhEUgAABBQAAAHQCAYAAAARXjmZAAAAOXRFWHRTb2Z0d2FyZQBNYXRwbG90bGliIHZlcnNpb24zLjMuMywgaHR0cHM6Ly9tYXRwbG90bGliLm9yZy/Il7ecAAAACXBIWXMAAAsTAAALEwEAmpwYAACW3ElEQVR4nOzdd5gkRfnA8e97dxw5yknOSYIEiSoKCioICKIiqCQRFMSAWURBJImCqAgCgmIgqgQziAH9mUDBLIoREAFzTlC/P94at2/Y3enenbvdg+/nefbZ2Z6Z2uru6u6qt6uropSCJEmSJElSFzOmOgOSJEmSJGnBY0BBkiRJkiR1ZkBBkiRJkiR1ZkBBkiRJkiR1ZkBBkiRJkiR1ZkBBkiRJkiR1ZkBBkvSAExHfj4gdpzof81pEnBARv42I30xhHnaMiNun6v8/WEXE6hHx14iYOdV5kSQ9eBlQkCQtUCLiFxGxc9+ygyLiy72/Sykbl1K+MCCdNSOiRMSseZTVeSoiVgdeDmxUSllxqvMzHdT9+bfa0L4jIk6f1w3uiPhCRPwzIv4SEX+OiG9GxGsiYuF5+X9LKb8qpSxRSrm3kY/nzcv/KUlSPwMKkiTNA/MhULE68LtSyt1dv7igBlFa2qyUsgSwE/As4ND58D+PLKUsCaxEBnn2BT4ZETEf/rckSVPGgIIk6QGn2YshIraJiBvr3eO7IuL0+rHr6+8/1jvaj4yIGRFxTET8MiLujoj3R8TSjXQPqO/9LiJe3/d/jouID0fEByPiz8BB9X9/NSL+GBF3RsSZETG7kV6JiCMi4if1DvebImKdiPhKze9lzc83vrczcC2wcs37++ryp9THPf5Y71hv2LdNXh0R3wH+NlpQISI2johrI+L3dVsdXZcvHBFnRMSv688ZY92Br+u0buPv90XECfX1jhFxe0S8qm7fOyNir4h4ckT8uP7foxvfPa5ug/fX7fP9iNhq3J1flVJ+BHwJ2KSmdWhE3Fr/x9URsXJfnl8cET+rj5C8JSI615FKKX+rPWOeAjwS2K2mP6P2WvhpLTuXRcRy9b1eT5kDI+JX9f+/rpG3Uctv43uzIuJE4DHAmbU8nBkR74qI05r5q+t9VNf1kiRpLAYUJEkPdG8H3l5KWQpYB7isLn9s/b1M7Tr+VeCg+vM4YG1gCeBMgIjYCDgLeDZ5J3ppYJW+/7Un8GFgGeBDwL3AUcDyZANzJ+CIvu88CdgS2A54FXAu8BxgNbIxvF//CpVSPgvsCvy65v2giFgfuBh4KTAH+CTwsb6AxH5kI3eZUsp/m2lGxJLAZ4FPAysD6wLX1bdfV/O3ObAZsA1wTH++WloRWITcdm8AzqvruyXZKH59RKzV+PxTgEvIbXo1dX8MUvfXY4CbIuLxwMnAPuS++2VNs+mpwFbAI8j9+Nzuq5ZKKb8Cbqz/H+BFwF7ADuS2/QPwrr6vbQ9sQJaRNzSCQWOV3+b/ex0ZPDmylocjgQuB/XqBkYhYHtgZuGii6yVJUj8DCpKkBdGV9S78HyPij2RDfyz/AdaNiOVLKX8tpXxtnM8+Gzi9lPKzUspfgdcC+9a7+U8HPlZK+XIp5d9kY7j0ff+rpZQrSyn3lVL+UUr5Zinla6WU/5ZSfgGcQzYqm04tpfy5lPJ94HvANfX//wn4FLBFqy0CzwQ+UUq5tpTyH+CtwKLAoxqfeUcp5bZSyj9G+f7uwG9KKaeVUv5ZSvlLKeXrje1yfCnl7lLKPcAbgf1b5qvff4ATax4vIYMtb6//7/vAD8igRc+XSymfrGMFfKDvvdF8KyL+AHwMeA/w3pr/C0op3yql/Ivcr4+MiDUb33tzKeX3NRhwBqMEcjr6NbBcff0C4HWllNvr/z8OeHpfL5E31jLzbeDbjKxnl/L7P6WUbwB/IgMUkI9hfKGUctek1kqSpAYDCpKkBdFepZRlej/c/65/0yHA+sCPIuKGiNh9nM+uTN697vklMAtYob53W++NUsrfgd/1ff+25h8RsX5EfDwiflMfgziJbEA3NRt4/xjl7yXGye+YeS+l3Ffz0+xFcVv/lxpWA37aJu36euUxPjvI73oDCZLrB+Ovc3MGi78Di4z2uEbDI0opy5ZS1imlHFO3Q/+2+Su578baNpNZv55VgN/X12sAVzQCYD8ke6+s0Ph8/3r2tkGX8tvvQrL3B/X3BzqtgSRJAxhQkCQ9oJVSflJK2Q94KPBm4MMRsTj3710AeVd5jcbfqwP/JRu8dwKr9t6IiEWBh/T/u76/zwZ+BKxXu6wfDcyrgfrmyntEBBkkuGOc/DXdRj7mMTBtcrv8eozP/h1YrPH3dJiBon/bLE7uu+a2Wa3xerz1GygiViMf4fhSXXQbsGszCFZKWaSUcsfYqaRxyu/9PjrKsg8Ce0bEZsCGwJUTWB1JksZkQEGS9IAWEc+JiDn1TvUf6+L7gHvq72Yj+mLgqIhYKyKWIHsUXFrHG/gwsEdEPKqOS3Acg4MDSwJ/Bv4aEQ8DDh/Sao3mMmC3iNgpIhYiZxv4F/CVlt//OLBSRLy0DsK4ZERsW9+7GDgmIubUZ/HfQDZWR3Mz8KyImBkRu3D/RzymwsXAwRGxeR1M8iTg6/UxlJ5XRsSyNRjwEuDSrv8kIhaLiB2Aq4BvkONYALwbODEi1qifmxMRe7ZMc6zy2+8u+gJCpZTbgRvIngkfGeNRF0mSJsyAgiTpgW4X4PsR8VdygLt967PqfwdOBP6vdkXfDriAbHxdD/wc+Cc5oB71+f4Xkc/93wn8FbibbLSP5RXk1IV/IQcf7NxIbauUcgvZrf2dwG+BPYA96ngPbb7/F+AJ9Xu/AX5CDk4JcAI5yOB3gO8C36rLRvOSmsYfybELruy8MkNWB7F8PfARct+tQ44p0HQV8E0yIPIJ4HyAiHhMLTvjOTMi/kI26s+o/2eXGgSALHdXA9fUz30N2Ha0hEYxavkd5XNvJ8dl+ENEvKOx/ELg4fi4gyRpHohSxuv9KEmSRlN7MPyRfJzh51OcHU1CRBRyP9461XkZtoh4LNmbZI1ipU+SNGT2UJAkqaWI2KN2a1+cnEXhu8AvpjZX0ujqoy8vAd5jMEGSNC8YUJAkqb09ycH6fg2sR3Y/t6GmaSciNiR70KxEPoYhSdLQ+ciDJEmSJEnqzB4KkiRJkiSpMwMKkiRJkiSps1lTnQGA5Zdfvqy55ppTnQ1JkiRJktTnm9/85m9LKXP6l0+LgMKaa67JjTfeONXZkCRJkiRJfSLil6Mt95EHSZIkSZLUmQEFSZIkSZLUmQEFSZIkSZLU2cCAQkSsFhGfj4gfRMT3I+IldflyEXFtRPyk/l62Lo+IeEdE3BoR34mIR8zrlZAkSZIkSfNXmx4K/wVeXkrZCNgOeGFEbAS8BriulLIecF39G2BXYL36cxhw9tBz
LUmSJEmSptTAgEIp5c5Syrfq678APwRWAfYELqwfuxDYq77eE3h/SV8DlomIlYadcUmSJEmSNHU6jaEQEWsCWwBfB1YopdxZ3/oNsEJ9vQpwW+Nrt9dlkiRJkiTpAaJ1QCEilgA+Ary0lPLn5nullAKULv84Ig6LiBsj4sZ77rmny1clSZIkSdIUaxVQiIiFyGDCh0opH62L7+o9ylB/312X3wGs1vj6qnXZXEop55ZStiqlbDVnzpyJ5l+SJEmSJE2BNrM8BHA+8MNSyumNt64GDqyvDwSuaiw/oM72sB3wp8ajEZIkSZIk6QFgVovPPBrYH/huRNxclx0NnAJcFhGHAL8E9qnvfRJ4MnAr8Hfg4GFmWJIkSZIkTb2BAYVSypeBGOPtnUb5fAFeOMl8SZIkSZKkaazTLA+SJEmSJEnQ7pEHjWHN13xiwt/9xSm7DTEnkiRJkiTNX/ZQkCRJkiRJnRlQkCRJkiRJnRlQkCRJkiRJnRlQkCRJkiRJnRlQkCRJkiRJnRlQkCRJkiRJnRlQkCRJkiRJnRlQkCRJkiRJnRlQkCRJkiRJnRlQkCRJkiRJnRlQkCRJkiRJnRlQkCRJkiRJnRlQkCRJkiRJnRlQkCRJkiRJnRlQkCRJkiRJnRlQkCRJkiRJnRlQkCRJkiRJnRlQkCRJkiRJnRlQkCRJkiRJnRlQkCRJkiRJnRlQkCRJkiRJnRlQkCRJkiRJnRlQkCRJkiRJnRlQkCRJkiRJnRlQkCRJkiRJnRlQkCRJkiRJnRlQkCRJkiRJnRlQkCRJkiRJnRlQkCRJkiRJnQ0MKETEBRFxd0R8r7Hs0oi4uf78IiJursvXjIh/NN579zzMuyRJkiRJmiKzWnzmfcCZwPt7C0opz+y9jojTgD81Pv/TUsrmQ8qfJEmSJEmahgYGFEop10fEmqO9FxEB7AM8fsj5kiRJkiRJ09hkx1B4DHBXKeUnjWVrRcRNEfHFiHjMJNOXJEmSJEnTUJtHHsazH3Bx4+87gdVLKb+LiC2BKyNi41LKn/u/GBGHAYcBrL766pPMhiRJkiRJmp8m3EMhImYBewOX9paVUv5VSvldff1N4KfA+qN9v5Rybillq1LKVnPmzJloNiRJkiRJ0hSYzCMPOwM/KqXc3lsQEXMiYmZ9vTawHvCzyWVRkiRJkiRNN22mjbwY+CqwQUTcHhGH1Lf2Ze7HHQAeC3ynTiP5YeAFpZTfDzG/kiRJkiRpGmgzy8N+Yyw/aJRlHwE+MvlsSZIkSZKk6WyyszxIkiRJkqQHIQMKkiRJkiSpMwMKkiRJkiSpMwMKkiRJkiSpMwMKkiRJkiSpMwMKkiRJkiSpMwMKkiRJkiSpMwMKkiRJkiSpMwMKkiRJkiSpMwMKkiRJkiSpMwMKkiRJkiSpMwMKkiRJkiSpMwMKkiRJkiSpMwMKkiRJkiSpMwMKkiRJkiSpMwMKkiRJkiSpMwMKkiRJkiSpMwMKkiRJkiSpMwMKkiRJkiSpMwMKkiRJkiSpMwMKkiRJkiSpMwMKkiRJkiSpMwMKkiRJkiSpMwMKkiRJkiSpMwMKkiRJkiSpMwMKkiRJkiSpMwMKkiRJkiSpMwMKkiRJkiSpMwMKkiRJkiSpMwMKkiRJkiSpMwMKkiRJkiSps4EBhYi4ICLujojvNZYdFxF3RMTN9efJjfdeGxG3RsQtEfGkeZVxSZIkSZI0ddr0UHgfsMsoy99WStm8/nwSICI2AvYFNq7fOSsiZg4rs5IkSZIkaXoYGFAopVwP/L5lensCl5RS/lVK+TlwK7DNJPInSZIkSZKmocmMoXBkRHynPhKxbF22CnBb4zO312X3ExGHRcSNEXHjPffcM4lsSJIkSZKk+W2iAYWzgXWAzYE7gdO6JlBKObeUslUpZas5c+ZMMBuSJEmSJGkqTCigUEq5q5RybynlPuA8Rh5ruANYrfHRVesySZIkSZL0ADKhgEJErNT486lAbwaIq4F9I2LhiFgLWA/4xuSyKEmSJEmSpptZgz4QERcDOwLLR8TtwLHAjhGxOVCAXwDPByilfD8iLgN+APwXeGEp5d55knNJkiRJkjRlBgYUSin7jbL4/HE+fyJw4mQyJUmSJEmSprfJzPIgSZIkSZIepAwoSJIkSZKkzgwoSJIkSZKkzgwoSJIkSZKkzgwoSJIkSZKkzgwoSJIkSZKkzgwoSJIkSZKkzgwoSJIkSZKkzgwoSJIkSZKkzgwoSJIkSZKkzgwoSJIkSZKkzgwoSJIkSZKkzgwoSJIkSZKkzgwoSJIkSZKkzgwoSJIkSZKkzgwoSJIkSZKkzgwoSJIkSZKkzgwoSJIkSZKkzgwoSJIkSZKkzgwoSJIkSZKkzgwoSJIkSZKkzgwoSJIkSZKkzgwoSJIkSZKkzgwoSJIkSZKkzgwoSJIkSZKkzgwoSJIkSZKkzgwoSJIkSZKkzgwoSJIkSZKkzgwoSJIkSZKkzgwoSJIkSZKkzgYGFCLigoi4OyK+11j2loj4UUR8JyKuiIhl6vI1I+IfEXFz/Xn3PMy7JEmSJEmaIm16KLwP2KVv2bXAJqWUTYEfA69tvPfTUsrm9ecFw8mmJEmSJEmaTgYGFEop1wO/71t2TSnlv/XPrwGrzoO8SZIkSZKkaWoYYyg8F/hU4++1IuKmiPhiRDxmCOlLkiRJkqRpZtZkvhwRrwP+C3yoLroTWL2U8ruI2BK4MiI2LqX8eZTvHgYcBrD66qtPJhuSJEmSJGk+m3APhYg4CNgdeHYppQCUUv5VSvldff1N4KfA+qN9v5Rybillq1LKVnPmzJloNiRJkiRJ0hSYUEAhInYBXgU8pZTy98byORExs75eG1gP+NkwMipJkiRJkqaPgY88RMTFwI7A8hFxO3AsOavDwsC1EQHwtTqjw2OB4yPiP8B9wAtKKb8fNWFJkiRJkrTAGhhQKKXsN8ri88f47EeAj0w2U5IkSZIkaXobxiwPkiRJkiTpQcaAgiRJkiRJ6syAgiRJkiRJ6syAgiRJkiRJ6syAgiRJkiRJ6syAgiRJkiRJ6syAgiRJkiRJ6syAgiRJkiRJ6syAgiRJkiRJ6syAgiRJkiRJ6syAgiRJkiRJ6syAgiRJkiRJ6syAgiRJkiRJ6syAgiRJkiRJ6syAgiRJkiRJ6syAgiRJkiRJ6syAgiRJkiRJ6syAgiRJkiRJ6syAgiRJkiRJ6syAgiRJkiRJ6syAgiRJkiRJ6syAgiRJkiRJ6syAgiRJkiRJ6syAgiRJkiRJ6syAgiRJkiRJ6syAgiRJkiRJ6syAgiRJkiRJ6syAgiRJkiRJ6syAgiRJkiRJ6syAgiRJkiRJ6syAgiRJkiRJ6qxVQCEiLoiIuyPie41ly0XEtRHxk/p72bo8IuIdEXFrRHwnIh4xrzIvSZIkSZKmRtseCu8Ddulb9hrgulLKesB19W+AXYH16s9hwNmTz6YkSZIkSZpOWgUUSinXA7/vW7wncGF9fSGwV2P5+0v6GrBMRKw0hLxKkiRJkqRpYjJjKKxQSrmzvv4NsEJ9vQpwW+Nzt9dlkiRJkiTpAWIogzKWUgp
QunwnIg6LiBsj4sZ77rlnGNmQJEmSJEnzyWQCCnf1HmWov++uy+8AVmt8btW6bC6llHNLKVuVUraaM2fOJLIhSZIkSZLmt8kEFK4GDqyvDwSuaiw/oM72sB3wp8ajEZIkSZIk6QFgVpsPRcTFwI7A8hFxO3AscApwWUQcAvwS2Kd+/JPAk4Fbgb8DBw85z5IkSZIkaYq1CiiUUvYb462dRvlsAV44mUxJkiRJkqTpbSiDMkqSJEmSpAcXAwqSJEmSJKkzAwqSJEmSJKkzAwqSJEmSJKkzAwqSJEmSJKkzAwqSJEmSJKkzAwqSJEmSJKkzAwqSJEmSJKkzAwqSJEmSJKkzAwqSJEmSJKkzAwqSJEmSJKkzAwqSJEmSJKkzAwqSJEmSJKkzAwqSJEmSJKkzAwqSJEmSJKkzAwqSJEmSJKkzAwqSJEmSJKkzAwqSJEmSJKkzAwqSJEmSJKkzAwqSJEmSJKkzAwqSJEmSJKkzAwqSJEmSJKkzAwqSJEmSJKkzAwqSJEmSJKkzAwqSJEmSJKkzAwqSJEmSJKkzAwqSJEmSJKkzAwqSJEmSJKkzAwqSJEmSJKkzAwqSJEmSJKmzWRP9YkRsAFzaWLQ28AZgGeBQ4J66/OhSyicn+n8kSZIkSdL0M+GAQinlFmBzgIiYCdwBXAEcDLytlPLWYWRQkiRJkiRNP8N65GEn4KellF8OKT1JkiRJkjSNDSugsC9wcePvIyPiOxFxQUQsO6T/IUmSJEmSpolJBxQiYjbwFODyuuhsYB3ycYg7gdPG+N5hEXFjRNx4zz33jPYRSZIkSZI0TQ2jh8KuwLdKKXcBlFLuKqXcW0q5DzgP2Ga0L5VSzi2lbFVK2WrOnDlDyIYkSZIkSZpfhhFQ2I/G4w4RsVLjvacC3xvC/5AkSZIkSdPIhGd5AIiIxYEnAM9vLD41IjYHCvCLvvckSZIkSdIDwKQCCqWUvwEP6Vu2/6RyJEmSJEmSpr1hzfIgSZIkSZIeRAwoSJIkSZKkzgwoSJIkSZKkzgwoSJIkSZKkzgwoSJIkSZKkzgwoSJIkSZKkzgwoSJIkSZKkzgwoSJIkSZKkzgwoSJIkSZKkzgwoSJIkSZKkzgwoSJIkSZKkzgwoSJIkSZKkzgwoSJIkSZKkzgwoSJIkSZKkzgwoSJIkSZKkzgwoSJIkSZKkzgwoSJIkSZKkzgwoSJIkSZKkzgwoSJIkSZKkzgwoSJIkSZKkzgwoSJIkSZKkzgwoSJIkSZKkzgwoSJIkSZKkzgwoSJIkSZKkzgwoSJIkSZKkzgwoSJIkSZKkzgwoSJIkSZKkzgwoSJIkSZKkzgwoSJIkSZKkzgwoSJIkSZKkzgwoSJIkSZKkzmZNNoGI+AXwF+Be4L+llK0iYjngUmBN4BfAPqWUP0z2f0mSJEmSpOlhWD0UHldK2byUslX9+zXAdaWU9YDr6t+SJEmSJOkBYl498rAncGF9fSGw1zz6P5IkSZIkaQoMI6BQgGsi4psRcVhdtkIp5c76+jfACkP4P5IkSZIkaZqY9BgKwPallDsi4qHAtRHxo+abpZQSEaX/SzX4cBjA6quvPoRsSJIkSZKk+WXSPRRKKXfU33cDVwDbAHdFxEoA9ffdo3zv3FLKVqWUrebMmTPZbEiSJEmSpPloUgGFiFg8IpbsvQaeCHwPuBo4sH7sQOCqyfwfSZIkSZI0vUz2kYcVgCsiopfWRaWUT0fEDcBlEXEI8Etgn0n+H0mSJEmSNI1MKqBQSvkZsNkoy38H7DSZtCVJkiRJ0vQ1r6aNlCRJkiRJD2AGFCRJkiRJUmcGFCRJkiRJUmcGFCRJkiRJUmcGFCRJkiRJUmcGFCRJkiRJUmcGFCRJkiRJUmcGFCRJkiRJUmcGFCRJkiRJUmcGFCRJkiRJUmcGFCRJkiRJUmcGFCRJkiRJUmcGFCRJkiRJUmcGFCRJkiRJUmcGFCRJkiRJUmcGFCRJkiRJUmcGFCRJkiRJUmcGFCRJkiRJUmcGFCRJkiRJUmcGFCRJkiRJUmcGFCRJkiRJUmcGFCRJkiRJUmcGFCRJkiRJUmcGFCRJkiRJUmcGFCRJkiRJUmcGFCRJkiRJUmcGFCRJkiRJUmcGFCRJkiRJUmcGFCRJkiRJUmcGFCRJkiRJUmcGFCRJkiRJUmcTDihExGoR8fmI+EFEfD8iXlKXHxcRd0TEzfXnycPLriRJkiRJmg5mTeK7/wVeXkr5VkQsCXwzIq6t772tlPLWyWdPkiRJkiRNRxMOKJRS7gTurK//EhE/BFYZVsYkSZIkSdL0NZQxFCJiTWAL4Ot10ZER8Z2IuCAilh3jO4dFxI0RceM999wzjGxIkiRJkqT5ZNIBhYhYAvgI8NJSyp+Bs4F1gM3JHgynjfa9Usq5pZStSilbzZkzZ7LZkCRJkiRJ89GkAgoRsRAZTPhQKeWjAKWUu0op95ZS7gPOA7aZfDYlSZIkSdJ0MplZHgI4H/hhKeX0xvKVGh97KvC9iWdPkiRJkiRNR5OZ5eHRwP7AdyPi5rrsaGC/iNgcKMAvgOdP4n9IkiRJkqRpaDKzPHwZiFHe+uTEsyNJkiRJkhYEQ5nlQZIkSZIkPbgYUJAkSZIkSZ0ZUJAkSZIkSZ0ZUJAkSZIkSZ0ZUJAkSZIkSZ0ZUJAkSZIkSZ0ZUJAkSZIkSZ0ZUJAkSZIkSZ0ZUJAkSZIkSZ3NmuoMCDhu6Ul890/Dy4ckSZIkSS3ZQ0GSJEmSJHVmQEGSJEmSJHVmQEGSJEmSJHVmQEGSJEmSJHVmQEGSJEmSJHVmQEGSJEmSJHVmQEGSJEmSJHVmQEGSJEmSJHVmQEGSJEmSJHVmQEGSJEmSJHVmQEGSJEmSJHVmQEGSJEmSJHVmQEGSJEmSJHVmQEGSJEmSJHVmQEGSJEmSJHVmQEGSJEmSJHU2a6ozoOnphw/bcMLf3fBHPxxiTiRJkiRJ05E9FCRJkiRJUmcGFCRJkiRJUmcGFCRJkiRJUmfzLKAQEbtExC0RcWtEvGZe/R9JkiRJkjT/zZNBGSNiJvAu4AnA7cANEXF1KeUH8+L/acTDL3z4hL733QO/O+ScSMM10bINlu8HghU/f/OEv/ubx20+tHxIkiRZLxkxr3oobAPcWkr5WSnl38AlwJ7z6H9JkiRJkqT5bF4FFFYBbmv8fXtdJkmSJEmSHgDmySMPbUTEYcBh9c+/RsQtU5WXeWR54LdjvRlvHlJab4yhpBMHDSedTGyIaS246QwzremWzjDTmi/pDLV8L7jpDDOt6ZbOuGl12vsL2Lot4OkMM63pls4w05pu6QwzremWzjDTmm7pDDOt6ZbOMNOabukMM63pls4w05pu6YybVsd6yXSyxmgL51VA4Q5gtcbfq9Zl/1NKORc4dx79/ykXETeWUraaTmlNt3SmY55ctwUzT67bgpkn123BzJPrtmDmyXVbMPPkui2YeXLdFsw8Tcd1WxDMq0cebgDWi4i1ImI2sC9w9Tz6X5IkSZIkaT6bJz0USin/jYgjgc8AM4ELSinfnxf/S5IkSZIkzX/zbAyFUsongU/Oq/QXAMN8nG
NYaU23dIaZ1nRLZ5hpTbd0hpnWdEtnmGlNt3SGmdZ0S2eYaU23dIaZ1nRLZ5hpTbd0hpnWdEtnmGlNt3SGmdZ0S2eYaU23dIaZ1nRLZ5hpTbd0hpnWdEtn2GlNa1FKmeo8SJIkSZKkBcy8GkNBkiRJkiQ9gBlQkCRJkiRJnRlQmIciYp5NMxoR7rsHuXlZvibigVwmh7mtp9t+m44sS/PXsPI0jHR6aUzH7TQsETFzqvMgSdKwPGArbdNBGfIAFRGxfEQ8NiKWLaXcV5dNqNI13SvsD/DK5KS2fUTMgsmXr4hYNSJ2mGQaQyuT9bsz6u9JDRgbEXMi4iHDKufDPJaHsN8WGVZeett5WGkOoWwvBtArS5NMaxjle+jnyVJKmez5LSJWiohN6rTMQ8nTsNKJahLJLDGMPE23sl3T6JXve+vfE71+rxgRa0w2P9NZRCz2QK4HDNN03E5DuIb3AovDOO6m1faZl2V7uq3rMDwQ16nngbRu07pRuSCKiKdGxMsi4vKI2KexfMYkG1rbAh8BngtcHRFLwkilawJpvz4intj3PzrnLyLWjoinRcQ6tXE52YryDJjUevXS2W4y+ehLa62IeHRErN5YNpFttTJkY2mS5eGYiNgvIlac4Pd7PkCWpW0n8uV5UCYBXhkRFwCv6qU3QR8B9ppswzQidoqIYyNiz4hYdzKNk3qcvDsi9p/kcXJMRDy8L+2JnsuPjYiLgZdO8vw0rLJ9dt1GmzTSnmhakyrf1VDOk/V7O0TE6yPiiY1jZKL77VJgu1LKvyf4/V6eto6IF9RyPuH6QEQ8PiJOjojdSjWJbJ0fETtP4vs9061sQ1/5nkRw6QrgKxGx8UQzMuxrdyPd5SNi0cbfE77OARtGX2+OYdUxFmTNbRARMcyAd//2nsD394ecPr7+vVD93XW/PSwilpjMNXxY9cl5YGhlOyLWi4g1ImJTmHggtp4P1o+ItSfy/UY6e9XfQznOhl2+J5iHmc31GUY5iojVImLhqV63YVrgT6zTSUQsDZwB/I6cMvMNEfGpiNiulHLfJAvOCcAHSikHAd8B9omIcyJiX+h2EomI7YE9gRvq3zO7ptFI52JgB+C8+rNnRCxb3+900EXEo4GTIuLTEfHYieSpprMP8La+ZRNtBOwJXAQ8D3hOXTaz6wUqIp4GXBMRx0XE8r3y0PXiHRGrAUcB2wPPjYgn1+Vr1pNe2/zsCfwVeA3w6ojYrLduHbIztDJZ//fTgd2Ay4FdgSfVC9wWzQpqi3T2JGewOb/+vUOtOO8bEUt1SGc/4E3AbcDBwEeBAyNipfp+6zJVy+TLge8BLwAujYiFJnCMPBV4VCnlu41lSzd6h7Q+p9f1exzwfuDJ5LG7dURs13F7T7psR1oWWB9YCTg+Ik6ob28bEYt0TG/S5XtY58n63acBJwP/Bd4V2XtisQnut12BGaWU99S/94yIIyLiVRGxfId09gHeTtYDTgNujIjdGu+3PZc8HTiR3N7nRMRZMcGGd91vS5dSPlv/nhMRD4uIVTruu2lTtms6bcr3Qi3TegrwR+AdwOERsUxd3qUMDfXa3Uj3EOBdwI8i4jkw4ePl1cDWpZQfNHpzrDWR9CLiEXW/71y/P+GedBGxWb2eTCZIOSn1vNGrfzwPeGf93fxM1+vKPhGxO2TvmehrQHVIZzfgwoi4KSL2run9p/7uWg4+SR5769W0ux5zQ6lPDkvU3mRDLtvPB04HjgZeEhHviIgtJpC3w4B3A2cDB3epJ/Wl81LgCBhaL8NDGEL5nmQe5pRS7q0B5VdExOJDClBdCLy28X+mS7Br4kop/gzpBzgQuLJv2VHAXcAbyUrgRNJ9DPDRxt+/IRtzzyIbO0d1TO+dwPPq6yeRje8fAS8FZnZI58PA0+rrNYGvApcAL5zgen4beCYZvf0xsN8E0/kSsHN9/Qhgf+BFwB4TSOs7wBOADcgG5Yvr9jq0Yzo/qPvsBOBjZIAiGu8v1yGtN9ft/HqyIfBm8uK7SMdtvWN9/Q7gPVNZJhvburffTgE+X9frDGDvDunsB7y6vn4p8BnggrrfXtYhnY8Bj2v8/RHgqppOtE2nfvfGXlrAQvXYWbfx/uyW6fyksY2eXffdr+vvTueXWoZ2qa9PBq6t2/ztveN6Csr2k+u6PBI4Ffgc8FNgyY7rNqnyXb83lPNk/f63GvvtXLLidkPd1l33267AWxvl+xrgDTV/7wRmtUznukaZXKTu+28Ap3XMzzcb6SxH9p54SNftXb9/J/CE+vrwWkZvrdtpswW5bI9Rvj/ftXzXsrQjsBh5fjxrAtt5qNfums6S5Dl8XeCJwPuAfYFdgE3rZwaeN8nz4yeAR9e/X0k2dr5ABkFW7JCnI+rxcWxd56uBx0xw/Q6vebio7sONJrqtJrGNtyd7Xj0NeEk9hg+qyxYFNmh8ttU1qu63fwN/A74IbNg1jcbnV6/noKNrub4OOLR3TgEW6rDfPlX322snuK2GUp8c0n7blmwDPGaIZXtx4OfAisCcus3+j6ynPqPt/gOWIq9rqwLrAZeR16eu16Ul63lk3fr3Y8mA/ovJG0Uzu5Sn+r1Jl+9J7rfdgPvIgOuO9fVngf0bn+ncrgOOrNv8amDfqSqXQ99eU52BB9IPsDzZcNm+b/mawHuBh04w3XWB9errRwOva7z3KLKS1CUQcAh552YxsgL5BGAn8sJ7/qC0gAAWBs6pJ7El6/LzyYb79cCRHdfx5cBHGn8/CXhX4++l2hy4dd1+AMwCHlJPcOfUk/l76VAJIO8mNRvNt5EV94PJSvMuLdNZGHgh8DDy7tTT6gnyEuDhZMDjpR3ytTLwunoCfyTwM/JCsj+wWovvbwa8sa/cfopsMK9al427rWuZ7l04JlUma3laGnhJ/XsJ4BdkEGcWcBjZkF+mZXobk421p9Z1WrYufxzZVXjNQfmpv0+u23mV+vdHyYryh4CXd9hfa9a0FqZWqOr2Oa6+3h94dot0HgZ8DTgL2Luu4+PJSsCngDPbbu/6+zXkxfF55N3llcnK/HPJSs+456u63xZplO0VJ1O2yTvlS5CVmYfXZd8igzEfBrZpmc5mwLGTKd/1M5M6TzbS2Ro4pb5eDLidbGStVtM6u21ZqmmsQjaQDyQrOivW5WvV7b5Ji/02i2wUPQNYvC7/YD1G/tfYbHmsnVFfL1T34QeBI+qyZwNPbpHOTPJ89iWykvwy8py2LhmkOIk8/loFTcmK+mTL9qJkxW/SZbuvfL97ouWbbJCc0vh7xVq231LLVjBORZuRa/e7GdK1u5H2qcC76+t1gX+QgZj31W22fIs0eueml5HBiJnA18kG2Wo0gnwt0lqcDMCuVvf9rnX9vkHWN2aMt6360loSuKWWoznAW8nAwuy2aQzjh6zbXEIG2e8BnlOXf4pskP6gruMaHdLcCzi5vj4R+HMtD73zwp4MOKf0pXcQ8Pz6+o3Af4Avd/j+kmQwYC1gnbrdzwOW6pDGUOqTQ9xvPyXP2SeRPTBfNpmyXdNco28dFyLPvc8m67qtgpTkTc8P9m2bjwMr1
L93YECdqX7uWOCO+noVMtB8PFn3+RCwVod1Wwz4LrDxZMv3JPfbFWQvgvcDfyDrX6fWc8gnyB6s59XPtj2XLFXTWZmsT3wPeFZ9r9ONiun2M+UZeKD8kBfqmWTU+LPkxXAWsHB9/4vA0yeZ/oz6s0hj+YuBSzqmtShwJvB8+iqzZKW71V0XsnH1nnrSeDfw2bp8CxqVnpZp7QfsVF/PBh5KBgOWqevc6q4XsGE98VxS98MJdfkSZAW6S0NwqZrGL8hI4uV1+SyyEfXOLicA6gWMrNBtQDZYPkZGPbftUA5m1f/9PDKC+j0yyPFOYOWO2312/b1MLRMvmEDZnFSZZJS7Fv0XjFoWVumQZq/y/3Vgh8byG4B1WqbxWLIicw5wJXBRXb5ZXd7qbkv9zvJ922gr4NN1+XeBrdrsKzJ4cBh5YT2r8d4a9dhdrEOeFifv3pxQv7ty470b+vfBgLRmNfK4fpey3b8dyUeLTiIbzN8m73odScsKCXMHbnq/O5fvun3OYPLnycUZqZxvCOzWeG+Vrvutfu+JZM+bW6h3o+rybwIPa5nG3mSg7gSycfSRunxPsudTm6DLbLLSv1hj2S513y8BfJ/s3jsonZl963Z773iry4K8izewUVo/vyIZMHvTRMr2aOte13VC5+2+dPYle2Ad1LV8123aq1P0zt1bkNe71vULsoHVC7RP6trdSHMTatCX7Dnzqvp6oVq+ntkijV5AYddadi4Aju7L9xW0OPcCy9b/u0SvjJEN5R1quq2vleS19gPN8lGPnbUbx0zrRvcEt+9DycbZpvUY+WrdFhcAvyLPl08lz1nHd0h3YRq9f8iAyZXA3fX/3dr2uKvfXww4p7G/rieDgb+lxXkOOIC5A8Irk9fbp3bIwzMY6TU14frkkPbbGmTvsc3rPvpI3RbXkz05diJvyrQu2431uoi8DryIvEnRa9x+ANi1Q7l6VD0+FqnLLiF7Uy1J1qGWbZHOsmQ3/u+TgbzDG++dTcveT4wEc88igy2PB34JrF/f71S+J7HfFiGDxr3AyiPrsXA32fP5LPIRxi+13Wc1nTfRuPlD1nfeT8e6+3T8mfIMPBB/yIvL18iuQy+rB8A3J5DOKtQu941lM+vvqCfa79GyAtlLs/7eg2wkf49soDyMbCRf1zKdxervXckI5wG9fJB3hy7rkKdeWg/pW/7Rui2PA97bcdvtTQYWVmssO4UaYGjx/YUbr/cFdia7Ki9Vl50AnN4inZXqNlppjPevoEV3bLISsXjj71lkpPTHwJPqsjVapLMdWXltbpdemdqJfDb3oEmU/VUmUCYvIy+yi/bKdt/7p1IrKB3SnE3eUb6YrEicXX/e3+K7OzBSMX4keQfn0b3yWY/nd7XMx0q17NyvKyMZjPsMLSrwzfJDNio2A+Y0lp1Myzvd5EX6xTQCNOTdylPIivOZwIUt0lkdeHozHxMs25eRd20WbSx7Ddmo3K3+3arB3V92+t57fNvyzUjDZl/yPPkdJnaeXLiWgSUby2Y3Xp9Eyy7rZCV5x8ZxsjZZqfkleRf4POCCFumsWo/1hck73i8lK4+9ng5nUHsdDEhn+Xp8LN1Y1guavp+slL615bp9qB6fzTLQTPfNdD8H9M5rz69l+5AOZftYxumB1rZs18+uR941XJ/aMCOvmbcDu7ct3+Q1YOEx3nsGOXZTq8f6yOvHvnXfT/jaPUbaQd9jNzR6rQz47l6N148mA/p/B3YnG4IX0QgwDEhrBhk0uaXuz3Oo9Qgy+H5Qh3VajDxvLsRIkPJs8jGIJcgg1TxvoNZ1mll/9xr7N9Xf/yGDeY8gu4oP7M3D/a+1zcd51ieDZm9ouX0WYyTYdRh5l/su6iMUtKufLNSXh1l1XQ8hG6kHdNhfc/qWTbg+Ocl9NpPsZdULum9I9ij6Wd1n/yLPv13K9vKN1/vXbf08RuotH6HRoG+7/xvLjyCDIO+jEdwZb781Xm9LjqOybGPZ+6k9UFuk1QuUvpoMkF8AXNx4v3X5Htb+a7xemHwM8+56bHyHvO7+hEb9fEB6S/aluQh5Hf8xEwhQT6efKc/AA/mHvAPx0npyHXiXZpTv70J21/w92f2zFw1fsZ5knw+8omVaq5JR2fdTo3tkQOIo8sL43vr+ugPSWZG80/9ustvfsn3vrwTcQe2q1CJfjya72D1plPcew0gXxaUHpHO/ChlzX5i2Ii+8y7TI0yPIgW76K0Wn1bycR94hHjdqS97h/mbd5reSleJHNN5ftu7XQek8rJ68jiYrlb1uqi8G3t6hPD2a7GJ7ERkhv9/jEWTld9zKLWP0yiAb8YcAr+yQpy3JAMSH6OtqW8v4JnW/DdpG65FBpFX6ls8hu2W/kmxELTognUeSAYjRek1E3Rc/psXd6cb+/0Bz/zPS0DmdfG51UNl+eN3/r2SUboz1/e+1LNu9MnBxLQNr1OUrkL0VLiQvbuN2l2xspw+Qx/uKfe+3Lduj7n/yjsnz25ajAWUg+j7TtTfAamSD5J3kHc6B58n6vW3IBtF5dZu/qa9sb9Nhv60B3Fv3zUuALevyxcjK6gsYp8HZSGc78o7TxcDHRynfm7Qp32Sl8Yu1vHypv7yQDcjfDSrb9bNb1ePj/YzyeATZK+C7LcpSb/+v1rd89Vq239uybG9f91fvUamZfe8v06Zs18/uSFY631+30+nkXeQ9u5Rv7n8NWKQ/b2Tgctzz2yjHQ/N1p2t343tbk0Hqi5i7J0gvKPdE4Ost0nl6XccLeuWGbIy9mLzreTotgkGjpPvEWh73YuTa+WHaN3Ae17edekGzPcjz30W0aHRP5odsWK9VXzfH3lmdrJPdDnyhLvsQjUcQx0lzW7J++l7ypsfqNOo85GMr3225fT9EdgF/amP58Yw81td2XJfTqecA+p65Jxvd13Ytn43vt65PDnnfLU3WBd5GNvwPJ69vm5K9X//YpWwDTyF7Nx7EKI+B1HLZ5ngbc/+T9aZbgW+0SOdRZE+nzzLKdZEMBg5Mp/H51zDS1lmVbICfBzyxS/me5D5bnzwfrUP2/lihHoNB9iT5BTnuyEa9fLZMt9db6n7BB7Itdjodx4uaTj9TnoEF/acWvJ3IRsvQu7zVk8a69SD7E9mQvaLxftvndj5INmi2JytVm5KRzd5d14VGOzmNks5F9cS4O3nXp9mtaRbZVan1oEfknZ4ra96W6a0TeTFZhnyedmDQpJ5wTqYO/tT3XpAVyrbBl49T717UfKxUXz+knkyOGO3/jJLOR6jPxteT9WnkBfHQxmeWaJHOMmTj7QtkZPZIsvL8TEbuCAx89IK80/rM+vrEmp8f1BNZmy6kc2h0BWakYtVf4e4y8M415N27zckL/XP69tsytHhEgbwL9S0yqr0DHQdL68tP73m2TcmLyjuYu5v6wHEq2ux/siI48FghzzHfA24mB/LZn+xG//RaPrenPi40wTLwfXLgrC7PqF7bOEZOI89PHwMO7li2+/d/c7CjXoOk1bOuo5SBZRrvtTk++u/WzfWMdd1fs9tuJ/JOxqFkhXJTcgC+nwN71vcf
zyiB1HHSO5NswL+J7LVzOC16JPSl8ZnePiKvA4eT57v96rJlaDHOTN3/vQEr30UGEs9tpLN585gZkFavcrx33Ye7N/cJWZlrk6fx9n/vkZM2j3GMNxhnr8Ld9vnk9zNyDugF8N8LHNYsZy3SWYa5rwEvJOscRzJyx7zLeXdG39+9a/f2bdNofPfGuv8vIBuRD2UkaPpQ8to+8HGMWqaeRXbdvt8Aeh22+VqMc/6v+/SrLdPajwxoN+/A9s5Ls8hz57e6brOO23cj8jz5AXIA3n+T16TV6vuLAX8hbzxdTaN+OCDdHzIyuON363ef3Hh/W1rcMSV7Z+xKjk9yCXkj7NFkHbF3vLQp408lx924Gli9WVYZ6QY/8JG10Y6DmsYyZG+uVvXAIey3DcjA1bvJXgjvIm8CHl/L+CvIYMKatBwfqqZ7BBkUPpG8JjyBvDZtV9/fncZg0l33f6N8v5N2Yzt9E9iHvJ6cTQY/lybv5i9c17PtmDx7k0HFa5k7OPl0cgyDj7Ut35PYb5uR15EryTrJb8jrZC/IsTQZTD+1/t02WLZzPYavJnvJPJe5e+FtRuOxqgXxZ8ozsCD/kN27v0ae7C+oB9RrGIkkL0RG7ycyCmjvoH5m7wCqBe6P5J3Fl9VlbU7Ua9K4gDIysM97yAti28EFtwFubPz9uHrA9QbnW4pulZr9yaj2EmSX5+sZGTStt/6PapHOc8i7Ya8mK7UvpnHHtGOeDqHxeArZ/fd6Mvq6V9t9V/f9GWTkttndeQfyudlXd8zXzuQFd2OyQnpXzde4g3A18rNkPZntRwYGfkOepB9X1+29g06MtZxfMVZ5IbvcrdFhnXZi7kGF9iIrzfu2Ldv1cxuSUevHkZXX95FR5W3q+1u0yRd5F+GndR8tQV7U3lH31ddp+Yxy2/3fdjvV7+1B9grYjrw7+nvgMx2+P14Z2Ils/J5Pi9kmyDvKlzb+vouslOxOdv1r+/z9WPu/F9BpdaEeUAa2ru9vNqgMkBXOOdSRpBk5B7UO2jW29VLkoxyP7HvvqfV4e2SbtPq+uyVZiVyLrAj+iKz4PIl2vRxWojG4KVm5fh3Z5f3ntA+4Dprh5aUd1ml34OrG3weRAY7esTusc8DDaT8Gx3iDcV7Q5hjp5Z3s2fJ+5u6ivBV5DJ/B5K4BdzJyV3rQILqL1p+H9i3vlfFO1+7G919HPRfUcvnjmrfvUp+Z7v+fY6Tz2kY6zyEbO8/pmp/6/ZvI89nz+pb3AuDb0H6A168wcmf0UbVsvIxaFyBvVOw/kXx2WJ8rescmGdT6Qz3mfkoOPrg2dRYkshE7bi+l+rmn9R1325ABz08Ah3TI22sYqZ/OILuCv5dsjL6TbrMWfI48t7y+bvfezCBtz7ubMcqjpcwdFB44VtEQ99vHGRkj4avU4BPZa2kvMlA5kRlaliADFUdRe6mSdfrWd+0H7P/WM5jV9euNLbY0ef6/iGxwd+5F0CgDJ5PB8/6ej2u1Kd+T3G/nAC+qr79K9sR+K1k/ekUt10c1zidtr1G9wZj3JK9zb6v7brv5VSbn9c+UZ2BB/qmFoTc67gZkV6QTyQrNcmRlaUIXxb7/81qyEnI6OcvAJjQqdC2+vxh5UXoDWfH7XuO9g4BjWqazI3U6wMbB9G5G7sC8H3h8h3x9jBoBJyvh7yDvvnU6YZCVrIPJbsH71YP/bGoDkLyb13YAtVeS3T5fRDYG309Wtp5TTwitKiI1rY3qyWnXWh56d23WqSenVutZt81aZIV2tZruHWTD5IO0fHazbptLyYbO/zWWzyCjpqOmw8jdgavIivqFZOBmVbJBugx5MTmw437bgJEAUu9OxsFkw2DUMSfGSGdxGmM21O19Lhm9fwZZ+dqzZX56x8jngQ/1HSdvbJunFvv/vR33f++5wd5jF3eSXRIvp+Wze5MpA31pPKSx3zZh7kcVVicbXW0qtUPZ/8MqA3W7XEn2drrfuBDkHYU1O+TpYPL80f9Y2PMZeeys013lWp72IHsA/IKsfL+bFo9g1DTeRV4LLmHuIPNmdb3bBJTWZPwZXrpMXbkVtQcSecd3oXr8XUq3qRSHcg5opHUGkxyMs35+mbo9DqvHyqzG/7iMFr14euWESVwD6na4pv6+3zWaDAg+uu16NfbXy3vHRN1vH6yvH0s2oDZskc5CtRyv2li2JxnQ7TXm2zYqX0j25NqFDJ5ezNxjULVq4DIyG8ZZjPRuuYGsgx1DXr8Hrttkf8jG42XM/SjBa2q+bgO+TPZcvbRjuuvWdHci64cHkvWmzWs5aVun2IqRG2ivAM6vr1eqx3Cr4ETd/1c0ysOJwEn9+2TA/rqJuWdA6e9xts94aQx5vy1HTum6BRnI/yxZv309+ejTI8kg6DdoMQvOKOk/jpFZsV5LBvXPqWWjzbV30P5v80hnkDcF16x/H0sdo4qsr3ybDj2e6nH10fp6bbKu2WpMiSHvu5eQdZDF6zbZmLwx/Kd6vB1MBk2e2CHN1Zg7CL9IPXZeRrYXF+jZHf63XlOdgQX5h2xk9lc4NiUrMr0K44QLCiON9seQEfvb6TCaaF9a25EN+Bcy95SBBwCf7JBO/7Oy+5FduLYHPt9l3RgJJvS6bG5BViI6P4/IyF3EGXUfHE5GOW+mxfNkfWltRFbS/ksdWbYuP4WWgwI18vJcMlL9JvLu9KZkdPMLE1jHR9bvfoKR7uYDu6Q1vr8Q2Y1xTl2XHcgKyxHAp1p8/wlkMGFbMvDyVXJ06VEH5ZtgOZ1NXnh/Cmze8bvNOxFLkA2vnwAf67iNdiGDUM05j4+i4/O782D/P72W649RgzfAizumMakyMNo2Z+7uwC+i0etgfu7/yZSBmu+ryYrgWWTl5tC6fK263Q7smJeFyQb8b4HXNJa/nHpXZwLr91BG7uD1Hp1pPZATWUk6jOztcHLf+rcOUveV8UnNOjTGdnsfebeq9VRjk9n//d9nkoNx9qW1Sy1Tp5AV911qGfi/tmk10ux8Daj5vpqspB9FNozX7vvMRJ9L741JsBD5CE/zkbhzB+Wt8dneYMe955SDrNhfRctHzOr316vbd1Gy0XQEGSQ8jQyaXUe3YPXp5GB+x1AbN2Tw/CTynD7XY1Hz4oecyelc6mMAZG+b82rZvojsXfRlsiHYdvDahcjG57tr2biW+sguY4xl0jLd5hgMp9L+ZtWjmLtX6QaMDGw+sH5BNma/Qjbi7/c/yZtNna6VQ9hvh1IHbyYDLCczMiPa6+ryM6k36TqkG+R14Bqy3n0TIzN+PaNlGoP2f9vH1WYzMgPdhsw9pss7aTza1SKtvZh78OktyZ5Op5DXrfkyzScZYHkf+VjC6eR16LXkdfdK8hG855NB57aBztnkuezDzD0GyqrkI38Hzs+yOc+23VRnYEH+ISufnyAjg9s0li9VD9DVh/i/dqXeQaD7HfxmJWvZeiK6iLyL8G1a9Crg/tHeXmVpdbLS9TNgxyGs5yZkQ/WVbQ/WcdLajXwUYoMJfn/jxusl64m71V2Jvm2+NhmFPKueVK4C1ptAfpatJ7eBAyWN8t3
+Z2afRT5P/V7yGdhW24i8c9CbQ/27tSz9707SJPdXc5sdQ4fukmOtK/lsadsBQpv/fzFG7iauWI+TCd2RGuL+X6keGzdNdvtOpgyMk/7KZNfLgTN89OdlmPu/axkgK0RnUkfIJytCnybvTpwMvH4y25oMlH6J7OZ+9kTLEjlexkLkCOGtg8BjbJOFyOvWbWTD6Idt9tuA9DvP8DJWHslgwDuZYLByoueAvjQmNBjnGGmtTDbsj61l61xaTl/bl06na0DdzzfQCDqRPSubY53Mi7Gf1iK7dy8zyXTeQwYBJnojZTHy7uJLyTuMF00gjdfWc/anGBmo80QaAbl5+UMGWd5Qt+cLyB5ip5GN7b80zlutGoGNdGeQDdKNGAlWbELLgWL706q/e/XCzWgxmPIo6fTXVd/BgEdKyDroDXU7rU8+CnpYX75mMJ/vApOBpwvqOfY55E2915D1pd+QweZbmeDsIIzMgtK7qx90aHQPc/+PkvZ6Ez3+mfvcvSkZPGvd83mS+e7NdPTwesydSt7A+TYZuPtBff8kWszy1pf2TPL8fwEZpF66Ln8bLQNv0/2nd/BrgiJiJbLbz/rkSeJSagOilPKwjmktVEr5zxjvRZnAzoqIGaWU+yJiVinlv3XZcuSBsiR5x+XcFumM+f8j4sMApZSnd83fGHl9Bnnyv2SS+TqB7I71nMnkq6Z1OlmpeVGH78wASi9/EbEiGeBYrJTyp0nkZU4p5Z6ImF1K+XeH7821rSLiUeQAT78rpfx8jO/MVSbrvtmBvOu6M/n89pOBz5VSftc1L6PkaWYp5d6IWKaU8seu69RcTk5rt00p5RMt0umVvZnAfY19FuRdruVLKW9su36N7zLM/R8Rc8hu6Xd03f9jpDewDLRMZxHyzuCKpZR3D/jswqWUf43xXqf9P+D/tC4DEXEA2dC6lBwkcpmaj964Ba8qpXyn5f/tlaUZAKWU++rybclZe/5aSvllx3WZ0Uun/r10KeVPbctAI0/9x9u+ZAD8plLKDS3SGeu4nU1eB5crpbyl5TqNeezW/7FSKeXONmmNlT4dzgF93+s/bn9P9sT4c8c89F8DFiml/DMiFiul/H3Ad2eWUu4d472B14C6HrPInk1fAW6rZeBp5B2xp0TEa8nzSadz24B8L0seS98dVBZalIElyWv4dyeZp0eSPQ02bHteaRwzK5NdzJ9DHitfJ699O5ZSfj+ZfI3zv5cgGx33kQ2R95F3Rk8me06WmocbSyk7d0i3t13/Vx9svDezpv/HUspJk8z7scBvSimnTTCN3rZ/Ftmw27yU8s8xPns88N9SyvH176eR++qEUso3J7QSExQRi5HjE9xHnus/TQa0ziTrTD8kH4U+lZwy8qullO93SD8a55Ilye73V5RSbmvbRqjnhZmT2f8D6t3LUqc0b3stGPC/XkYdK2SsesMQ/kfvnDWHPGe+hRyvbh2yPXcA2ctgBtnLYAfyUao/tEy/V69ZnQzibEf2yrmZvLH7yC715+nKgMIQ1JPItuQYA88in7++rJTy2Y7pHANc1bx41oOcWhi3KKXcNMm8NgML97uojPL5JUspfxll+f8quBGxfGZxeAfEoJNjZFDkD4NOoG3WsUVeliBPINeOV3mPiIeSFY5VSylfaFy8e5XIRYB/tcjzImSFYWaz0tmrPI7XIGuZn1bfr2mMViZfQd5te2Yp5RtdGra14XFvf0W5t58iYnHg7y220SrAr/s/16iItG1ojVWhnasy39+gGyOtjclnpr9Z9/eE9v9YeRurUTjBPLUuAzWdWQCjVEJ66Y0ZDO37/Bbk3YbT+r7f+91q/9fvjnWc9C7eXQNujyErFE8m572+ri6/mRyI9DcDvr808OdGhW9C27p+dzFymshlgLtHaZAuS1b8Bh0nY+Wp67aZTVaS7+0rk3OdX9uUzRbHbtey2R9s6bSOEbEBWbn/ZS03c+23/nPBgLQWrS+XLKXc3VjeO78tA/ypxTZ6ONkb4oq+5QuVUv7TZhs1r919DZHlyB4XZ5EV571KKb9os35tRMRSwA6llI9N8Pudb570n0dGef8IckrNiTZwZ5I9UlcgxxD5TinlVxNJq+X/ey/56M9X6//ci+yK/haya/m9ZC+Az5VSftuljDb+x6yaTjTqcgPTiYj1gXtKX2Oqr064RCnlr13yM87/6xxcjgyUPQ84opTymWHko+X/fR/ZK+ZucmaHjcjHRC8lA0D/JM/H3+6Y7sD6R8t0Rgskzeiy/xvf668j/a++Q/ZiHRig7ktvvCDFCqWUu7qk1/F/n0vOMPIyMpD3FHLQ43eXUm6pn1mE7IU3B7i1lPLbCf6v2WTvmxXJYMW3SscbDNNWmQbdJB5IP2QEq3P3PLISe33fsoUbr1ciB2BrO03kY8nI7q6MMvc6dR7zFul8GnjuOO8/jgk8gjHWelBHwx60nuQYB88c67PAZr390SI/q5DP2I06lywtu6eTj798qObtft3HqXPFt0jn7eRzbWcxSnc4Miq95hDyc/h4+RmjTM4m7/r1ull2eq6N7F59Ofl82tH0PSdPPpvWZht9lnEesaHlAHrAq2oZP568w/qwvvcPoGW3TfLRn/tNdzaB/b9u39/9jyo8Z7RyMcE8jVsGGp87vm7zU/vzV99/IfVRmAHpfIPsEfG8Md5vtf/rZwcdJwPLAHmePJY8T65Rl+1Adkv8IDmA4Vtb5ucKsivyqKM2d9xvZ5GPpb2Z7B7/uL73X0SLQfRa5Gn/Nnmq+biupnUsfbPv0GGGlxbHbtvz2+Z9f/cfJ4e0TOc7jPOsf9tzSf3sUAY/JLvY3scYz0S32UaMc+0mnwf+CxN7fvsQ8hGCUac8pf0jZs8knzPffYz3d+nfpy3T7Q1+O+GxDSbz3cn+kAGEb5PBxJXJ8SA2IHspXND2HDJKunuRvVUOIoMrnbd33f+f6j/++z7zIibxvPtEvsvI44nNRyZ6U5kuPZ/22ypkIzTI6+WVZBDoPEbGTOiVzf3brGdN6wVkfemIvvXrrfOzW+63l5MBqf7ZT5pjvbRJ50Qa7QnqQL6NdAZOWTlO+r3tM1/GS6j/a1Hmbk/MJMdseBM5ltM+EyyT63P/AZnn23pNxc8MNFSllPtKizt1oziVfC6HiHhaRLwF+ElEnFjTvRPYp9RSOZ6I2Id8PmsGOUjWy4B9IrvbENmF7zUR2bVznHR2JwdLuqD+vUFEbBkRj4iI2fVOxF5kA2GgiNgqIjYqVdQuwY33tybHPWC89YyIQ+vnjoiIx/R/tt5x6qUz6K7y/uRF53zgkMjeCM33H0ZWfAat25FktP8gcnTrl0TEqyPibRGxSf3Yz8uAbtMRcRjZzerFZATzqRGxU0TsHBFzapT0vjLgjlLL/PxsQH76y+RbyWf+Di0jd59a30mKiBeTo/++mnwebR/glRHxvIiYVe/KttlGzwV+W7LXxYyIWD8inhQRG0XEIvUu939bbKOXk+XkWPLOwabAQRGxR013GWCF0qJbWz3mflNKuTgiZkbEERFxfEScMYH9f0HdV2tDHguRZkR2c1
yxtOgN1DJPg8pA7+7eVuQo3rOBSyJ7v/TeXwj4URnQLTkiDiHL4jbAfhGxX10+o67fbFpso/qdNsfJveOVgcZ5Mmo6R9eydRd5R+lH5ICarxyQl6jnwpXJu1P7RsTREbFczdOSkXeFV265315EPhf8YrLyvj5wTEScEBFL1e19fSnl/4aQp5UG5SkijiL32eHknOP/JI+TF9d0Fgf+XVrcZWl57I6732o6RwAfjYizIu/m946TmXXdFwH+2SKdA8jeEh+q392vrteLI2Ktuq0H5qem9UJyv72ArIi+uXcMN3x2vP1W0zmEfP58Z+AZkY/KNI+TxRhwfouxr93bRF53v0FOP/j2QevV51jyvL0yOQgjEbFW4/8+kuzSO66IOI4coHJDYJe6bnMa729CDsjZpr6zR/05MLIHz70w1yMrB/S23YB0IiLOjYjVG9+Nvs/sPyidySp5J/ZqcsDrt5A3WW4hg9//AraP2nO1ns9nDkozsh75QnKgu0cBX4mI5zfeb7u9TyKP369ExKIRsWHdJrNrOsuTN9QG3k2PiJVG25Zl5G75G8dbt6j1yfqd/0beaW/m/7Nk764JP17a0Z3ko0WXk+X67eT+ugR4ckQ8rmQPqDXJR+ra9Djo3RS8m6w7LR4R68H/1nlN8jGzQfvtDWQP6h8DD4+sbz2iplM67P/dyTFFbo3svUqpvcBqOhvXdR+oHm8HRMQL6rVp8TLS46F3/L04+toJw1ZK+Qd1IMqIOI085k4mx+/o1Vl7eX5Jm/zUcv12+rZFmbsn3Uvn9brNd1Md0fCnQBa6m8kCuDM5uu3uZEH+AjkoVJfpxS5k5A7yQuRjGO9jnCl1xkjnxYzMJX0YWbn+IjmIyB5t06mfW4Sc5uzz5N2R5Rrv9QbO2ZABg17VdP6P7FFwIDnYWW+2iF508yG0u+u2CPlM2wZkY/KjdR/8rwcFGalsk9bxZMAHcvTXz9RtdgzZbarNNppNnvC3qn+/hxzA5yLy5HZwm3Tqd984mfyMUya3JAc9emfHMt6bFnT3vjyeRN6NbTWQG9kN9N/UAaio86CTFfDz6TZt6cnUqUXr3xuTo6CfT8dB3MhK9Fvq62PI5+wOrflrNdc0eaz+om6TY8hB857LBAdIGlKeZtdtu3Vj2QXkIwC9v1doud9uBbaofx/KxAenHMpxwtjnyU7TgzbSO6SWxX1q2e7NEtBp/5GVv30af7+CDMK9FXjM/M4TGVB5buPvVchA8js7Hm9DOXbrvrqFvNv/MrIn1huovdvqZ9r2KtoNeHV9fRw5LeQp5F24LvO6D2Xww7qNbqH2viCDup9oc4z1pTPetXv3unzpjmk+tB53vev19fWYu7QeN2uQd9WXGpDOqrUMLlb//iJZoT+7HpOrk3cN73cXfZS0jqtl+iX1u1+jMYhfzdORLddvO7JXyE21PC1Sly/eSOtFXbbZRH/IAfd+S14P1iXrKg+p++/i+pnZwCtbpLVs3W8rNcrqE8gZEc4iH+VYbND2Ju/cngI8rf7duxFzY92fO3VYv5PIQSV3pW/w3bpes3vH5RjfH7c+yRT1MCF7C/yjHg+rk7NvfJScDetP5LPy0GJaXUYGue3NgPIVMjhxST32NibrVeOmRXbR/yEjM7L8Hxn0uKYee5vU7dnmeFu2bu+n123/E/I68L76/pJt0qmffSPZg643W8ldzD0F9XLAS+fjvrse+DVZl/9oXbf3AN+u7z+Elj26yLrlB+rrRcn69P6M9OZondaC9DPlGfDnf5WIDchuhLcCFzTeW528YLeaDqh+5xVkJaQ5Pckq9eA9j5aPZJARzRvJC+015GMXS5J3YK6kwwjcZMXv8+SdzneSlYCDGamcbECLUXjJCtZ76+uFyDsm57c5QY+S1kupF+f693I1Xw+pf29Hy4ocOW/2fWSl5k/U+cXJOzlXM04XwUYas8jnWSEfK7iZkZFgn0pWCltNeUVW+Cecn2GWSUaCM88lnwl9Rj2h3kAGKI6o+3Bgd7v6+1zy4vNB8k5b74J7ZC2X41ZoG+k9g7zQ7tZYNoNsvLVOp7Fdv0A2RN7LSGV5BfK4G7XLeV8ai5F3pVau2/5AslFyJhlcfCMdKrTkMf8FsvI20TwtTTZIlmWki+XhwHn19WuAl7dIZyXgWX3LTiXveD+m/t3m+O8NNPeU+vdDJnqcMPp5clUy0HEuHc8pZIX7uLp9lyQr8deSvSAGTu3YKN9PI3vwHEk+r30z2UPgaWRlslWFbRh5qmnsRFaID+5L9wDyujBwNg6GdOw20tmTnElgafI69Q5yNO7d6nHSav5yslL+bfL69JHG8kfUstFqBoRaJp9JNjp717SnAVfX168Fjm2Zn0P7lvWmdlu3979apLMjY1+7r2IC3ebJ8/5V9RjZnpzGejNyRPQzqF2GW6RzKPCK+npz8vqyNdmwORvYu2U6y5PBl14l/dF1O11Xy/zM+tPqOCbPvyeR16OP1O13LvDhRpmfpzMFkNOTrlbL9h/q9vgZGeT4AtlYvZPGrGIt0z2ZvkdL6n48hw6zc9Xj6z11n3+isfy5tZy2uVE1p67DueR1/+i671rPCEG7+uQy83Jf9eVnZfLm0yyyznURWbf4I9kj5Ad1+Zs6pLk38IL6ehPyPLVeLaen0HJKRvIc+QGyrrU7GYhZreb3OFrWKZi7Hndifb1/LZPfAR7aYd2WI4MlvUecVyIDTN8hg4tLz6f99izynPiiWp4Or8f+2fV4+2PdXq1nwmGIgbcF7WfKM+BPgTp6aH29NXPPx3sccH7H9BYmL4wvIysUvUrbTDIq3arSVl8fWU/Yn6IxdzUZ5VyzQ56WIweunF1PHs8gn+9/L9mI+g7tegKsTW0c17+XJSui19B9zvLVgcfU7bJwXXY5GTVflIwIjxs0IRs0valmNiRnPXgz8NTGZ24muzt3LRdLNV4vTDa+WlcEyQGBdp1IfoZZJpsXB7Jx+kmy8t+b13sOGR1evEOaa5HTp53RWLYEOQL30h3SOZi863MEjTEAyArcwHRoBOfIrukXko2vV9Zli9W0Wu9/Ri7ci5Dzzh9Wj5V/0HGqObICcj5Z+X/VJPI0u/F6TfL57G3qcbtm221U/27OVf0KWo5R0Nw2fcuWbLxufZwwyfPkKOnNIANwJ5I9Xf6PbCC+lBaVZeod0fr6yWSvosuAl/TWs5bvLoGumEyeGuk8gQxGnwps31j+XRp3B1umtQ5ZWXtbY1mrY5fRn/2eSVaQn0xeC/5N41rVIj/bkxXI7wEHNJZ/i45TlzL3tXNZMnD3BPKcO+5xMkpavYbyEmRg8biO/3+8a3ena2Xju8cBfycDW29qLD+E7AHV6hnh3jmBbEju1Fh+GFkBb9MwXYravbz+vSQZ6H4EcDGw/gTWb2dyhi7IgNBfyYbAhKYM7vi/jybPq1+v+b+JbLy/h+zyfkLdzq8BXtsx7f3IWcjewtzn8heQjc1Bwfxe788lyevJZWTdbY26fB0yQNgm2LU6tT5CBgROr/v8CLIu8EHGGberfq9NfbLTeWkS+613A+IWsiH53
rrvLiADuGeR597tyR4LrYNSjePkETRuAJA9zj7S4Xh7FVkHOJfGdIfkjadruuSpfu8tZD1i/3rcv6mm3/Zm5QwyALVd/fuhtczPqcu3mg/77Y1kHfLIeh75A3m+XpIMmG1Vf44mB/fskvakA28L4s+UZ+DB/ENGUd9TD/LjGInWzWy8/wPa3yXfjrnvrr+NvBP1EjIC9zbgYy3SOaWeAIOsqB1BRu8uJy9mbwUun+S6z67r9yzyot26UVG/P6uxnRYlG82tuxBRo6k05imuv19U03o3jUdExknn8+TdxPUbyx7FSBfTK6ndzgek0xscbheyYbpU3/vvo0V0m2yAvp7GwDhkpa2Xn6vGy8+wy2T9zpfJrqgr1P2+MBk1723zi1pu683ICtAOzXLQeH1By220DdmQ3ZZsjPSeVT2HrHx/FDiz5bodQ2MQwbqOryYbWteQlbWB85WTdxSPZezB875FToPVJk+9srQ7WXlbj7xo3twxT08gG6LNstQrB2eQ82u/rEU6r6dvwEZGKkprkHdy2t4lOZRs2O4zxvsDjxOGdJ5slKWXMXdl70QyqNR7JGOJlmm9jb4B+5g7eHohcGqLdPYiG6CPbyw7gTxPbd02TzQa7+Q5dnvy8YePksHWK4F3dNhOr6TxyAZzD+zV9tg9m9o7ZYz3r6XdueSpZGN75/r31nX7f5Y8B3yI9oNxLt143T8w5OtoOfhh3UZHUbtF12W9423jWqYOGZDGPLl2kwHcVerrZWt+vsDIXbir6OtZMSC9URtCZOOk1Z3X+vmjyfEqLiIbJL1HvE6k3t3tmpd6rKxN9hD7MHmt+DYTGGy7w3o8hGzwL0peQ3ZmZCDU/yN7A/UCzU8ig41tenRt0Xi9BhkI+GEtI5uRj4wObCyR4y+s2tj/byZ7lpxEBoGubZNOI71F+/7em+xl9FGyYdq6R279/qTqk5PYb8uTPa6WIOtLL2fkkamH1/PIy+s+/XTbsk02sGfSeIyrLu+VgY90OU7qdxYnxxv632CtZL1wYDrNclT/PoAMIP2pUS7aXud657TDyR4Al9X1ObUufxUtgqeT3G9LkkGfDRvL9qv5uZUM4PTqp4+jeyDoIUwy8LYg/kx5Bh7MP2Tl/rXAvmQU87DGezPJyu6oIyCPktYjyAtPc2aIWWR31ZfUQn08A+5K1ZPgHWTldZ26bCGyu9RTyDtUh9DyLjB5l/yZZEVmA+5/IXki2aVo3Egr2eXrCLLBsFJjefMi+zXa3dl4InD2GO/1nhO9YVBa9X9eR1ZA3tnctuRzj0cAT26Rn33IBuhxZGX2bHIwxdXJSO429f8M2kZ7kHc3TiXvaLytcVLs5We3AWkMrUw2tvVXyNHhXzXK+6vRItJO3i36Dnmx/j55F+MJjf3/CLJyNCid3Ws5OQn4ZW+fkReAh5OVoye0XLdRZ8FovN6Fdl3vn0LeBTuFrLgux9zH8RzyItSmAtlfls6qZWmljnnao26nN5CR+43Jym5vex9CBhQGbe9xZ6/p8kM2Ar9BNvp73aR70zgtXMvAuMcJQzpP9pWlE8lzWO943Qg4sH6m7R2kXYEvNLcRc59PViXPMYO29551Gx1b988GZGX3MYxUItuMLr4jGWR7XF+ZXp48Rz6DOpPOBLbTGszds6ftsfvUmk7vnLY4cz87/RCyQdJ1G61et9ESdf32pv1MBU8gG53PYe4ATO/xoF3JQEDb81JvG61CX6VzUJlkyNfuRroH1ePq68zdg2NXRoLFl7RIZytgo8bfM5m7R8VLgC+3zNNKjAQF1yXvNm7T2O7XMcZMMqOk9cJ6fPV6hOxIXmP+zkhjqXXvuYn8kOf8D9OYaarm6WDyLmrzhsWVtGsEHkAGKc6gcceXvIZ/nLwmtwksvwj4Un09o7F8ZXKsjgMZ0KOg8Z3VyUbtqGPnkNeaw8f5/lDqk0Pcb2szMnBvbxs9vJbHT5PnthvIgFurcafIoMjF5LnszdRH3xrvPx/4fIt05jreGssfRwaVvkx9fLhlOTqdxjmfvLnzvPp69qB0Gt87kpH6yArkTYKHNY7dz7cp30PYd0eT58NVarlcnOz19B3yMaNj6meup/0Njx0br5er2+zHTDDwtqD9THkGHqw/ZJeY/2v8vQN5d6R353w5OkTE60mtV2F8GCOj1z+i8Zk2je0rye6wryNHOe/0rN4o6d1SD8pzyEbTScxdqXgW40xr1/jcN8io+oVkQ/Cx/evU9iJCXqB7A1MtSjaWlmBkEKb30uJuS83TE+t3P0mLnghjpNNmEM3lW6TzdepgeWSF61K6dY8eapms3/kq2YjdgGzoHjvKZ9qWy13r65nkXeEvUO/a1/04sAt/3Ua9ff8u8q7IV4DXT2C/fa+xvZ9GVnJ+RYtKWl86X6QGnsgAwJlkpejECZTt/rL0nFqeB97Z7kvnBkYGz3trTfcmMuixdF0+6jSrA7bRW+o2OrHvc23KwMcaZeBksnviLWTFeNPS4jhhSOfJMcrSGbUsvXICad1CnYaPPP7PIRuHb2bkUYw22+izjW10Gtkb5VdkkHDZDun8ijynnUw2GrZosx4dttNXG8fuwi2P3dOAfevrA8lz5O21TC4ziW30QTKwcBLdG9s/rsfGm8kA7s6jfGbgOXjQNmqZlysZ4rW7sW9+QvaaexzZE+AFZCNgD7ISviED7k7SblDmHWnR1ZnsZn0VeY66X+OTHDDuUy3X73+N5b7lT6LWS+jQWJrktj6JGgzuW/52ag8X8hz66ZbpPZ483722bq9Xktf0J9b32wzCN4tsUPUGEzyAvB58iAwAdamjHkCOdXAe2QvyVOa+Q7w6dbyKcdIYSn1yyPvtHWQvhSMa63kq2XOmN9bQsrQ7Ny3MyECcDyfPKV+tZaDXu+xRDJiiebzjrfGZFdvsv1HK0avIAPURjc+0vc41g1O9ngqLN97fjTruzHzYbzuQ17lfkAHIN9XlT69l/mgyCNC2V8nTyXHLLqe2T+ryNcleTvvTMvC2oP5MeQYerD9kNP0l9XUvMn4Fdd5pMkL5iJZpPYrszro92dj6RD3hnkh2lW47yNFe5LRWvb+Prif+XoOy6yBlTwI+0/h7DTJi91VaDuBUv/dS4IrG388D3tD4u3U0mtodvfH3JWTD9Gt0GFG2buu3N/5eu6ZzCvls2YxB+aLOJkFe6EcbRPPKuh/bXIg2po7cT30chOxu1bvI7U2tSMyPMlk//2TgnMbfm5F3zQ+pfw8cMIuRO+JvIi/cD228tyYZ8X1Dy/xs3dtnZGP712QFbUPyrvWovVbGSGsjxp+Zpe0jE71nUh9HViJ+XdPbiKwMvIdulbaxBmS9kqzItbk7vSEjzxEvUvO0E9mb5HLgopZ52ZDscTHm7DUt04l6PJ1CNrSeSN7J2pDsnfBmsqIzqFEzlPNkoyy9Y5yy1GoWjfr9Vev//wR5nH6l5nV9Mij4wUFloG6j2WQvhsPJY/lu8u70muR57nJaNI7Iiv2JdV32JYNAbyHv5syqeWzbi2esY+5h5B2yVsdJ/f7+ZINkNbIxuQHZFf9DtBis
sm6jRcgGe/82WoM8v32Ylj1oyB4RryOP1UeRXZvPIc9VDyF7VLQJlg/aRu9qkcZeDPHa3UjnScBV9fXi5HF3ItlguoCW40LQYhC9luksQp53NyYf7bq0lsun17xGfW/c2aJ624T7N5bfQh5vb2RkION5+rwzIw2rRYD/kNM9/xx4Tl3+WUZm7FiNDuNfkL0djyIbT28jZ444t+V3e9fe48lzdpDj0+xbt/V7adEDs6axMHnXdx1GejZ8jaxf7FU/M5txHnVgSPXJIe+73ja6lgwsvpgM5u1LBuC/0nYb1XQewdx13SXJc9vr6DCzVpvjjZaPKIxTjlqfu2saYx1vHyCD+ovV8r3KPN5nK5P1huXIANUnyOvAzWTg5PNMYAaGuu8uJXt13kBeZ3YDDpqKsjkVP1OegQfjD9l9fztGnrHtnZQOI59z3Bv4ZIf0tiO73b+R7LLeHK36ubS8+0reIehN6zaTkYroBUygCxlZ2fs02YhoPi/7FLLSPXCAOfIidhT1Oe6ar4cDX2985mwaA7MNSO8Asjvj28mK6Bl1+fb1QjBwNob6+f91S2/83oq8g7Nrx+20EGMPDvcp2t3hWpysSC3eKE+7khfshck7xduP8/2hlsn63RUZuZvdC1DsW0/aW3dMaw1yXIfnkBeEXm+SlclHJgY+c0mO27BMfb0hc08ZuXIt621nrphZ03gReQdvMrNgvLDu50uZu9v7UmRFoFXZbuTrVLJxM+GBBhmpTK9Io/FIVno/TosRtMkgwNrk8TupbVS/sw15kT6HfIa42RX/SwweaHQo58n6+cUa22hSZamxXQ8ieyVc1Fi+ONkzY+mW6TyNPK+9H/hc33vX0nJQV+a+a7QO2U31rXX7/ZPGuBqDyhEjXdMfNsp26nKczCKDSq8gx7hpPrbyKQYMwsbIOe0pw9hG9fPNRzfmkBXIY8jG/L20GEGfPC/1xqqZUFliyNfuvrx9mGwIXMfIc84Lk9fd19Eu4L0cGThZiNEH0ftum+1OBjPeV1+vBPy5lsmT6/5co+V6DWosn0eHhuAktu9uZKPqy2RD7TKyR98Xavn5JfVxkrq86/qtRe0xR/Ys+SHZ8L2ExiCwA9J6PtkN/EwaM/qQ58xWM2zVfHy48ffCNR8HksfywLwwhPrkEPfbzuSNoKvIO/jHkne7v04+jtcbP+u59RhsO9PI4jXNM+t6Hk8NAJHXgVaDFTIyaOVYx1vbQdB7AYg1gZMmWo5aHG/vmU/H27PJAN3VZA/j75HXqM3JwMaPGLmZsl3b462R/uvr9lmjHs9/pVHPeKD/THkGHmw/9eD+BtnQu5n6TBJZCViRjGz9gPbPcL6ckWn49iDv4q7TeP9FwAdbpLM2sCk5gFuzC9ny9QT3diYwai55V+ld9aT7EEYGY7uQdndvgozSrta3/MtkBeUYGlM/jpdO3zq9lbwT1Jy94O0MGPSqL4373TUkGwV/aHNyJO+sbFtfb0dG28+i+yCaQc7/3RygsPc82vvIC9qYg6cNu0zW7455x4kMnPycFtMW1s8vXNdxazJ4cB55928PsjL20ZbpzGbuGQaaz4MeB7ynZTrrM3KhXbLma+W+tAbOgkFWHtarrzeq+/AU6hRF5MX3wg7bfGWy0bV+LUvvmEBZeiR9jde+bXYcjcDAOOnMIe/a9oJtWzP388GtttEo6fYaXq8h7zI/mmxMjHsOYEjnyb51a87K0GxYti5Lo6T9UBozy5DBjzbbuznewnq1HBxLNlh6PQ4+1HLdtmeUu/RkT5dvt8nPOOk3z8OttlPftt2V7GnxXbLn2k51/w8caJBsAOxVX29Qt9Ebum6j8dapl1/yLleb/TZaV+RO5yXm0bW7kc7KZKDqBWRQd4W6/Bzg1V32X9826jyIXt1Pc+rrI6iP0JHn9nNozGPfMr3n17L0LibYWJ7Edp1FPpv+RPLu8fvIRt++ZBf5NclA6R217N7CxGaLOofsmfkzMvD0UFreOGmksQ8ZtLuNkTrLubS/WbUIGZj6BHlT5xTqtY0MKu7SMp1J1SeHuN9uq2X3uLqP3kFekz5T33smWTdovY0a6a9W99cXyN5OvePtElrWT0dJs/PxxsggrDPr8XU++bjDZMrReMGp1oGXCW6DmXXfbEYGyfeo5ebjjDxu1j/L28BHjfv+xwrUnhvkOfNmciyMr9NhuucF9WfKM/Bg+6kH5TH19evqCec88vmkbclI62Ut01qBjGA3p4R6BCPTKC1Go4E4Tjor15PiZWTl41KyIbJWfX/zejFodXDVE+nKjFx4XkpejE4gKwHPJxuU4w4OR97Rej1z34lszvF9OdnYHXdqx/r5xclI5JbN9W68XoPsojbu7AVkw+9y8uL/fkYaf81G124MnppzazJCu2Lf8p3IbnPvo90gmpuQjey31BPgkX3vX0KOMj7mNhpmmaxpbEkGfMbspk02Bse98173/3tquXl/3TZB9lI4jYy2n8eASDsjM1ecQ1YAVup7/+FkwGTgPMp13b7EGF3HyTuMA2fBIBsAnyMrkM1pnPYhn738IHkRapOnXpm8sJabVcgK3L5kQ/n8lmWpN2Dh/daNvBhvSt6ZGLRuW9R0LifvqvSPor5Rm21UP7ty3aYL9y3fpu7/i8g7QWOemxjSebJv3S4jKyLNu+QzyHNl27LUW7dRjxOygdhme29IPrv5IuY+V+5ey9gHyQbuoClw+9etf7rPJYB7+o+fMdKaw9wBl/4y0OqYI3td/YXGiP01HweS57zTycrpoN4Jm9a8v6dvn+3ZZRuNt98YuQu3ONlQHHR96+23F45SvqPNNmLI1+5Gur3B705hJOg5g2zgvKtuxxv79+so6TyO7N3yMEYZVJYOg+jRuHaP8f65wCu6rGf93lPI89TtTKCxPNEfMnj78fp6F7Kh+py6v05tfG7LWk7aDKC4ed3/5zPS6F6nptnlMawtyEbthxmZFnt98rr7I/K6++mW+61XfhYjx974XM1jLzB0EfWRjjG+P5T65BD3227k+XEPstG/LBnwuYk8H72ZPG+/p8M22pA8F72J7ImxEHkOmNX4nze1ON42IYNT5zBKL8K2x1tj3Zo9QXoDaHd61GGUtJ9GBpZuo47zMp+Ot6XI+tFS1EFOyaDrHmRdovcoTutZ3sb4P28kz5G/ZGRazAmPQbQg/Ux5Bh5MP+Szsv/XO0jriflY8m7raYwMCtL2Gc7DqKO0kg2/q8huOx+vB8ZLqd13BqRzJvVOA1nxuoa8IL2BkQb8wzqs58WMXHB6AwCtQFacjiXvlg58Xpms4DUHfmnON78u8Dda3CGpn/9gXc+LyIZac+7rhet7A5/Fr+v0crJB84bm/29sqzaj8Z9PHeyFrFi8huwmt2t/ei3ycyRZyd6fkXmzH9tI+2nzq0zWz/5vCjEyUPPIWlabA98NTI/smvZysnLzSvJOzSXknZvehbbNtm7OXPEuGgNukhfunajzYndctzXJO9WH9taNvLs77kwa9XOfJB8F2IC8mO5KNvo3IBtjj6D9dLHNMnkscPQEy2RzwMINyEruSdR
pOsmK7eNbpPMxRsbueCdZSb6YkYEnt2uzjepnv09Wzg4hL/jNO9wrkoGTQd3Bh3KebGyj3rq9g+xCeUlj3Xai3gWf5LrNIs9xO7ZIZyOym+0PyLs/e9XlW5OV03Vp8cjEoHWry1t1K655+RWNcw8jDe7ZZGNn4DFHNkh/RlaCf8nIAJ9rMzJWTZuy/VGygXQOWTHfoPHeGjW9gduofv57jf22Ut9+6z0Pv2KLdDZs7Ldvk+MgzCDPBbPJRsS4ZYkhX7sb6fYGvzuPvJ6cWsvZFvV4OpIWj62RFfNbyPPvXmTjaxFGxnZ4Nu16Kj6FbFS/uh4b/QGqnWkR4Kif3Zxs0L6PPF7XJxtip9CxsTyZH7Jh83HyXHQfea5cmLyufJ5695dsyP2o5br9nGxkn0oeu28lG0y7UOtPLdO5gbxrfFHdh6sw0ttsETJI0eYavhcZrB11lgyyu/s3x8sTQ6pPDnG/LU+eT35OHbuh5ufZZCN887ofW22j+v1v1fU5jRyf4DzmHgfpYMaYKrkvna+TgeUPk8GJZfreb3u8fYs6PW9dtx3I89WzGXnUr+1A0b1gycn1WJtRl51Onhvm1/E2g7yu/Zw87y5al8+qy19f922rWd7qdzclz4fvqNtoKbKn6XsZGRdnno7BMp1+pjwDD7YfRp5pXhJ4bWP50uTzm2t0SGs1Rp6vejtZwVmNvEieSTYyx61skXcej2fuQQ5PJy/cF9HoltQyT/uQDcGlyLtIZ5GDKL6eDndJyIrLFxt/H1dPOp9hpPF2QMu0nkFeoBcjL4YfJRvel9YTyGz65n8fI52d+vK0GTkg0MMa+2PpFunMqNv3+Pr3jeSjEofWE92b25xcyUbIp5i7QntC3daX0nJwmyGXySeQz8uvUf++qublDLKh+ciW6awJXNP4+yFkxeH4+rvVyNsMnrliWdo/37hz3T+rj7FurcaFIBsvzXW7lXwe+Q1kg7DLtJw705iWsVEmN2qUyWVapPMYssHWP2Dhm8gK3x4t87MqjXE/yIv3iWTD/de0nH6psS4fJwdcu5K8C7sHI3e2VqPd7CeTPk+WkePtE6Os24vqurXubk02YD42yrr1xhtYkw4zDpBjnGxLHn/fJO/GX9vh+yvUbb30GPvtZR3SelQ9xg6q5fmT9PUMo+Xz2/XzzyYr508lg4p/oMXjO43vP4vGNGtkxfbYtt/vS2uLuj7PGGO/rUOLXg59+227xn67i8bAcwO+O9RrdyON0Qa/exvZ66z1uamxvW4gz5EXkUHUj9GxAUhe+99AVty37ntvmfrec1qm1d/oPoWRGW1aN5aH8UMGbn9Zy9FHgYPr8ouB59fXj6YGdQektR2NGSDIgOvLyV5wB3bI08uBK+vrRcnr02VkY7lTDxBy4MVew3Q9MjD0JGDzuuypjPN4KEOqT86D/XZg3W/PJq95nyODeXfSGFSxZVrPonE+q+t8M42Zllqm8wrq45/ko0HfJRu236LDwIDk+fnyxt/X1J9eULZTA5mRYMlbgN+RAap163uLkEHveXq8MfdjZD+p5fIsssfGbmS74t31/ffSYpa3+tnv1rJwDHmt+0hNc2NGbtLNlylMp8PPlGfAn7ojsnL7tQ6f741Y3TthzDXgGjmI0o4t09q0HggXkhfmb9Xla5J3T7uMMn85IwMoHkPezXkOWdH5JO3vAq1aT2JrkxX2K+qJ7ph60u4y0vELaFwIyYtYr0LRpYGzMSMzFPTukL+L2pAgK0qPaZnWOmTj5inM3eV9Tt2nrZ63qp99F/k824HUQf3qybI3xdqEIqRdy2T9zuZkxfGY+vvSunxJMtjx4g5pXUfeYVm0nvSvIwMAn6BOsdcijUeQjaLZjD5zxSW0n01lE7JydkJdt0v61u2lbbc3OSjQpWS3yG80lj+dDCi1nYZpTbKBPIOR50k7l0myQdkcsLA5gNZzaTmTRuP/X0Teebuhb1+8m5bnE7IC+ujG3y8gK27vrPviWwwYq4ThnieX6Fu3GyexbjvTeP6U7LbbXLebBq1br0zW348CrquvNyBHrf4Z+Vxqm+nhNq7rdvkY++2sDuv2UObuafX6mpczyccBrqExrVaL/fc46jSA5B2ue4A/1X3aJhC0KyMji88ke278Hx1GTW+ktUzffjuib799s81+a3x/M/K8NrNvv7W6BjDEa3cjzbEGv9uTDFau3TG9g8lG8WyyYfr3ut22oF3g/ChqA6eu4y9o9JSqZWTUO+CjpNXf6F6JbEBfRuOxmnn5w0ivvReQz6RfR9aPriWDZdeRDZVOjSwygHA9eb1bprH88WQ9bNxHRhrb8lBqD566vT9SXz+G7Ckx5uDOoxwrH2z8/cW6nc8j78S3GRRwKPXJIe23lcjzz5PJa8r/1f30dTLYdhoZfPkytX7RMt1tyXrAavXvZ5LnzN3JwFKbniAzyR5Fva78xzMSFNq95mvcqSYbaS1E1rFeQfYsOqUu34J8vOrpHdZttGDJt8lgSS/QNK9nUdmM7MlyIVlXuobsbfka4B9kYOg3ZNBzmQ7p7kV9ZKn+/dhaxpv1ywdN74RSCjPQlIuIxcgT5HFtv1PSP0spB5MnsvWBz0bEphHxdLLx9IWWaX2HvBN5A/m86vPqWw8nn/35T4t1iIhYiJzi6vMRsQhZsXx8KeWD5Inpd2T32zZ5up2MiD6LjE6/q5Ty61LKCWSFd+s26VTfBg6PiBdFxDbkyfrHZIXrsRGxQov1W5M6UEtELFpK+W9965PAyhHxjJrvLw1IZ62I2ImsXP2VvAv4uIh4XEQsQQYYViul/KNFOo8kL0QPIyuAW5GNQsjuxlvUPJVB6zdK+p3LZERsQq7X2eTo24uT5YpSyl/IrnwPb5HOphGxPRlMWJmMKD+NHEjoD2SldosW6exW//c7Sin/JqfkgtxWa0fE3mTj8lst0tqd7Bp3ANlFeQmy4d9ct43r32Nu74jYIyK2Je9sfJcsP59rfGQFslv5wH0WES8kt/EHSin3NY7TTwGrdCiTLyR76pxMNoh+RFYkexYnzy+D8nNkRGxOlpkbyOPrmsZHtiZ7zbQ5n7yCrGh9u7eslPJu8nxwO9nj6A+llE+Ol86wzpMR8Ury2DyFHMD0XDIoNJF1O4oMIHytkc9zyEdyflnX7feD1i0iXkY+kkIp5SvAeyLiQPKYO6aUsjZwa4tzycvI/f0GsiJ5LnlXsLluq7VctyeQDZjlImLJmrc3kZWtIM8Ls0sp1w9I54kRcTBZef088LmI2Is8X76eDCzeWUq5d0A6O5OV/1UjYvFSyr2llBvI42+NiDiiXrcGioh1yCDXH+u5jlLKWWSZ/BW53/7YYr+tGxGPjoj1yOD4O8nK6Qnkc8RrAz8ZtN/q/5/0tXuUNH9OBk53Ax4ZEQ+JiIVKKVeR181tB6xf9C26A3hjPQevQVbctyHvUt83IK3Z5PXt6Jq348kbAQfX7dfL899art7PgUUj4sURsUwp5c5SymlkwOzIiNiqZTqT8VGy/D2BbCR/gWzwHEVeO1cC3lZK+VfbsglQSvkNeQd5XeCpEb
FJRMwupXyObAiOe82MiKjXnQtKKbfU/30VOStA7zryKXJ/tMnPH4HZEXF6RLyG7Em3D3nOWpVsOI+Zl2HWJ4fkEvLcsz95jfwIGQS6p+bll/W968ly3tb3yHPjcRHxbnLQ6k+WUj5O3ijYcrwv1/12LzlF94/rMfMVshcVNZ1rqfWTQeo540gyQLU62eimlHITWY7W7LBuPwV+ExGr9bJLPo7xRmC/el7pXD/t6KNkz6+fkdf/1ci626LkuffJ5DVrJWpZb+nHwD8j4rER0RvA/GayN9cREbHcfFi36WWqIxoP9h8ysvgwOty5HSWNGeRB8joyQvZGWnSRG5DmUmQXnlZTIDL3oIS950iX7vvMTQyYb5q5uyatQ97FvZUclG898gLyA2qXuQ5p7URW4N/e3Nbk4warDUhnJTLqfD15x/fZjfdWIBuGv6LeLRyQzvV1H32YrFCsQb2TT1a0vtAynS/Vnw+RI+42BxpbruZn4B2JYZVJRmaKuJJs8K9OPmLSe25zGbKxulmLdL5OVmSuJxu6KzLy3N7StTyM26uglt87yLvSZzbLC1kxuYmWM1c00rqJDHKsUZf3nsFbvuW6LUU2hm8mK1W9u0BXkw3vI8hnCsdNp1Hufky9W0heqGeSjaclyIb4LwetXyOd5owHyzIyvebSdb0HrdsKNe/NdBYh70jdRN6J+xGwact1+wV1jnvqbBjUuy912V9pcQ7oS3dC58lGfnrbeiHyefLryYrbUR3X7VayEnk1I4/PNGcy+Mugdavp/IQ893yirteqdb//oMM26eXnh+R5aIVaji6s+/M1HdZtpVq2zyYDZVuQlbZVGp/5U4uytFItM+8iu6JuSt69/wXwpQ7rtlLdHmeRDaHNa5lcqb7/5FoOxh2stH525bq/LyPP05eSY2+s1fhMm/22cv2fl5Ld9y+uv+8CvtulPI/zPzpduxvfW5w83jetf+9HBl9PpNtgyr10mo+5HFnX+9P170UZ8NgLI2NA9B5Pa55v31W3XetHZxrpPpoMlh9E9irp9Vw7G3jeMPbBOP/7GTQeRSIfwftULaePJM/jLyMbr22neF20bu/etKF7kI+rnUwGUY+ux86gaXUXq2Vn1DJMXtNvYvAgsc1HMBcnG28frNu317PzeOC4cdIYSn1yiPvtILJRviR5Xnk+WXfsBRa+UNfpKPK82Wbg2l5am5Ln3APIsS62rO+vStZRxn20g6wfze5b1twHK9dtNWgA3BUZGSdjJnkT5wLyhsebuqxbI83lGRk8+91k3a63fpfScZaICey3jam92+rfK5C9FL5BBmH7y+QbO6Q9kwwInkXWez/PyOPYF9CyF+0D6ac3SJKmWCM6PNl0ZpQBUf+W6cwkB836cMvPv5OsrB1dSrmnl0apd5Ai4nTy4n9Ei3Rmk3dq7q7LnkZ2u/4p2TD9WSnluA55en3J6D0RsUgp5Z/19dvI512fMyCd95J3jE6KiCeTEcinlFJuqe9/EPhLKeXwDunsUdN5ZCnlnoh4OHn37p+llLs6pPMUstKwWynlx/X93ckgydnjpTNIlzIZEecDPy+lnBARrycDJf8lKzLXkN3uVmyx/5vpvKGm85+aznVkhHmtkj1Vxs13RLyEvGjfRd5d/grZfftFZGX07lLKmHdJxkjrt2SF7YvknYMjyC6iq4+3bmOkszvZ8NqBLNuQ4z18Yrw81fTOB24ppZwaEQ8j725vTlZyriAHsfxvKeV5Y6dyv3Q2qun0um5/ghxTYbVSyqs7pLMx2bVxHTK4tAkZtLiptOsNcBh5XBwcEVuTAYC/kZXma8jK23allHMHpTXO/2h9nuzLz3ZkI/susrK8BTm42w0t1+0Cslv6mfX89LNSytt6eSIHq9qulHJ+x3R+Xko5PSLmkBXvW+vdyX93SOddwE9LKafX944gz583lgE9CurnzwJuK6WcXHth7EQGfv5ENkx/T47q/ZaO6exMBse2IZ9PPjGyl9ignhfj5efjpZSPRcRWpZQbW6zbmTWtN0fEpmRg8baarxPJgNf2pZT3dEznNDIodCc5ZeWv2+y3Af+j07W78b3LyHP2UmRQ+iXkwKEHkQ2DZYAvl1I+2iKd/1CneybvJl9P3g08s+Sdzjb5WYJsYK9X+nqRRcSiZIPkHnKsgf+OksRY6c4iHy17HFkmZ5N3mA8jGzi/bptWV5E9uI4m73T/tJfviDiUDHA9lyyjF5INvF1aHMPnk+eihYCv13Pw6uQjCguT19Avluyp0CadmWSA463Av0sp90X2NnorcEfJXiLjpbMouU3XL6XcUOs2TyNvfNxGXgueTT7KcOcYaQylPjksEbED2RNpabKethp5c2oDMrj8MXKcpxXJffCFFmleTO7r1cnel9c23luY3N5/KqUcMyCda8nHlF5c+npHRcRSZAD05yV7ig1KZ03gqJK9Gojsvbs9GbRamXw8s826rUj2IPxXRCxO9ka5g6xz3RQRvUeaH1tK+e2g9CYqIpYlxwa6maxrfbqUcm893k4kr+M306JMNtJcnbzZcX3dTzuQ54+/lVJ+FNmT7aqa1j3zZs2mqWFEJfx5cP+QFb07yQjkt4FX9r2/KXlnZ9A0gf3pvLbv/Y3Ji12b5y770zq67/01yIbluM9ekqMbf55GLwayYvTa+vqhZFR50N2W0dI5kzpVDhk5fWKL9RqUn5XJQY8GPls8xP0/aKaIU+vycQc/HCedvWs6b6nL244uvFXdVtvWMvg18qLWG5G/y8wV/Wl9vaa1V32/7fPl/encWNPZoUNeViQrwqfVv68k72o9p26nk+vycQeubJHOm+vyQQO7jpXOQWSl6Oi261a/P94giufQ8nnpIZbv0fKzFtlN8kQGzDLRSGedWm56d0R2Iu9AHd/3uXGfuxwnnTd1XK+x0jlpAttoFnmH56T69/fJZ9MfTd7Ne1eb43acdB5V0zlzkun08nPOoO3cSGvQ4IcvbbnfxkrndTWd1gNfzoMyvhcZcF2EbIQeTA7G+SG6P1fcTOdA8vx2ISO9ntqevy+r2+UqRhkvhWzI7dgyrb3Ja2fzzveq5LnuuWRvpYEz2AxhO88kH235FPkoUTByB/5y6qCXZMO1zSwa+5GPJ61ONvwup2PPrTHSuZRGryQy0NB23JPzyR4WH2akXrJMPYZ3IceOGHMaPYZUnxzyfluW7KV6W903B5J3pg8hG6N7dUxvtMEmn0ted1etnxk4o04tT0cx0lvtmlqun0fWcVcAnjWBdHpl4VkM6NkyRnrXkj3fnjLKe71ZFU6YT/tuTfKG1OVkELF3vL2F7Cm6G3lNaDW1I9lrbv8x3luo7sP5sm7T7WfKM+DPgv9TLwCvqa93ICsAX6FOC1ZPbgMHvhsjna9SB4EhB4lZb0h5eiHtB6nZhNrlvv69JXk3CRpT7U0ynSup00hOMp2rmcfdNsfI01gzRSxVL3JrDiGdz9BhMM76vUPqBWMR8hm6l5J33p8wgXUcLa2P0SIQNCCdo+p+a91FmawsXEZGxr/cWL5M3d6rDyGdz9Byho9x0lmWvDvda
r8xeBDFLzIfKv4d8nM9jSloB6Q1i5Hu9r3egRuT56ZWM9YMSOfqIaVzFR1GhW+k9zDybs+nmHtmlZlkZXzbSaYzg3zMq+1MMePl5/q26dTvjDf44Xm0nylmvHTOaZvOPCjnewHv6W2fxvK3kUGctus3Vjpn1HRaBbq5f2DiALIx9zE6dLmuae1PTsv4frJ32bK0DCYNcfv2BsxdjmxAFnIAxgvIngm7kr3UWs3M1Ej3ShrXMrLnw9mNv3ceUjoDZ8Gqn+sPTFxG90fThlKfHNJ+a5bhj5Ln/3vIel9vrKE/1HVeukO6Yw02+VayftIqSF2/P4vsTbQkcHg9Tu6jZV15HqTTH5y4jgzKH0rWExah5fTDk9hvS5H15L1rmfx2LdOnkTetvkA+otZqZq5Guvsw90xv+5FjRp3VO3aZj4Gu6fYz5Rnw54HxQ6PCQUbpDqwnkh+R3Xonm84PyQHGhpGnH5LdDdumE33pLEVG4E+gMf3fgpbOfCoXO9FxpohhpkM2Ks4iu/CfUZdN6GI2rLSGnKftadylm8R2mlbp1O8+m7zL8Q2yIfb0YZSlSZTBoeWHue9M7ksGTjuPeTLd0qnfX4FsQJ5M9l55DI2ZTBbgdDYnxwI4ipFnZfegMSPG/ExnmD9kT7tLqL0tGssXJxs/bWfCGVY6ezF6YOKtdAhw1O+cQgYkDiB7h51J9hBbpL7fu2Exz4IM9X9eRAY1vl33/UvJMXW+RwZKDq+ffVqL9IK89j+DbDg1A4I31NdHkAMsDiOd97VczysZToBjKPXJIey34+p+O4McAHuZevz+vO6zi8meSh+g/SwaC1GvkWTj+h2MjBOycE1rq5b56415cCIjPRM/RwZg7gb2G0I6dwHP7LjdhhKcmMR+u5YcXPgbNf+fJ2d4OZMcT+JKRno/LdUh3cvJmwtLkY+VfqIeH2+taS0xr9ZpQfiZ8gz488D9qSeTv5DPAU55OkPO0+n1BLnjAymdIe//xcjK02S39aTSIaPUVzIy+NaEK47DSmuYeWqkOZu8IzDZ7T0t0mEeDDY7yfWZZ/khK96tKtsLQjrk88Snko2lzzLBAaqmWzp9aU5o8MN5lc4E/3dz4OJtyMr2d8nn3RciAwS3ABvOj3QaaUw6MEE23maQwb9eg205spvzF8iGwCnAZ+bxNt6bHABufXIKu9PIR4uOJaeJfkvX/DB3kKV/QL53kT0evsaAwVSHmE6bwMThDAhwDPgfQ6sHtvx/zyQfSdyBDHC9gey1dxD52MOqjc/eRIsBIhnSYJP0PTZUj4tTah572/uRDJjidVjp9KUxKDix7zzeb4+uZTbIwMabyZuIPyQfpVmxfm4R8pGutueSWeRYIG+qx8btE0nrgfwz5Rnw54H7Q97R++h0SWfIeXoYcOIDLZ0h7vtJz14y5HSWrr8n3aV4WGkNOU9BNnZf+EBKp5Feq+eu59fPsPLDkO6KTrd0alpLkV1c13wgpdNIbyYd5mSf1+lM8H+/k7yTt0Jj2YvJO8GXkI/RnDEf0xlaYIK+xnJf2uuRd5f/Q4tZdSa5jZ9HNmpmMtKIXJfspXbURPJTt/d5NEbuZ+SxiueSNxcGjoUyxHSGEpgY8D+GVg9s+f9eTvYkeSfZO2FdMrhwFvXxi/q504GzWqbZ295zxth2rdKq6ZzTl86B5COH9xu3YF6nU7839ODEBPfblmQAYytGZopZnXxc5TYaj4MysUDQ6uQArof1faZVWg/knynPgD8P3J96AW3dnWhepzMP0hpWo2JapTPkMjDtGjr++OOPPw/2HwYPXLwjeSd20KCsQ0mnfnYogYm+tJqNpdmMNOpPAS6eD9t5fXLMjs/28kMGR7Yku0mv2yU/o2zv/kELt6jbq+sg2BNKp7GtJx2YGPA/hlZ3a/n/tifHOPpDs2yTPRc+Tj4+1XqAyBbbe6KDl7+q8d6qHdZvKOn0lYGhBCeGsO9eTj7ucDmNAHJd196g3G+leyCoeV5qjqXUOq0H8s8MpHmklHJvKeXP0yWdeZDWpKfnnI7pDFOpZ9vpko4k6X/eXkp5Adlo3zYivhoRz6jvbUE2EO+dH+lExDbkuCT3AddExNEApZR3lFIeRjYG9icbC+PqS+uzEfHKmta/S04btyw5w8khLdZtUkpO53wh8FhyBqYvk+Mn3ETe9Z41gfw0t/f2EfGViNirvrcpOQr9X+ZHOo1tfS9wbWNb/6d+5CZyJoSTW67bqIZZd2v5/75MNiTvJsvRW8mxN15CzjCyKNkoP6XltobRt/dT63td0mqm8+jefiul3B4RL4uIDSeQnwmn0ygDhTzeXgVQSrmQ7HFzdcv8TFpELE0eSxuRvRSuj4jz67rsAvw2IjYgA3vjTodd02uW72si4jUAvbLYJa0HurCeLkmS9OASEbNKKf+trxcip4k7gJxicVYpZd35lU6tuD++lHJKROxATj37UOD0UsrlEXEU8PFSyk8mmNYccurhKyLiJcBnSynfb7N+ExERTySDKbeSj9usSd4RPgH4NzkA37dLKY/vmp8B23vhUspa8yudcbb1qaWUKyPiQOAHpZQb2uRpqkXElmTj/lvA38heLY8jZ2H4CdnlfS2y58LsUsraHdIeb3u3TmuUdJ5d01mxpjPR43ai6QwqAy8DPlVK+WGb9CYiIo4jx8pZjRxr40fkDFOvJ4/DJWsenh0RhwI3llJuapHuoHNJ67Qe6AwoSJIkiYhYEvg18IxSyqfnZzrDCnAMSGtlsrG0Tpf16SIitgfOJqcsnUEGD+4lu03/mByocXVyyshe421S+ZnG+611gGOqRcRjyDESvk42QH8F3EE+k/8bcprAr5KzPPyMSW7r+j+nbL8NM51hBScmIiK2InsBPZcMAr2SnJr76FLKuRGxKPmoyv7k8b/wkAJBndN6IDOgIEmSJCLi2eTUhXtPk3SG0lAadloD/s9pwC9KKe+MiOWADcgB6dYkR76/c9j5eSDvt/klIs4m7zafHxErkoMwbgH8s5RyXONzQ9lGw0xruqVT05pfx9uhwKNLKQc1lj2cnO7zG8AbSin/WRDPJQsSAwqSJEkiImYCi0/2efUhpjPtGm8t/s8+5DSQB5dSfl6XrcbIYxzPLaX8a8jr9oDdb/NLRBxJzkhxaCnl13XZRuRjKn8Enl8bpkPZRjX96bbfhrlu8+t4eyg5Q8b5pZTPN5avBhxPBhRuWxDPJQsSAwqSJEmadqZj463l/zqFkenqLu0NUhgRnyMDDb+cn/mZ3xbUdYuIt5PP4H8e+GKjq/vHyf12z1Tmb0Eyn4+3p5M9Eq4DDi+l/L0u/w5wZCnl+gX1XLKgMKAgSZIkTULtBr1uKeWmiFgDeDQ50vxDga8BywO7lFK2m8Jsqk9EzCKnF7y1jtq/N7Ac+Tz+j4FVybvR205hNtWnjlWyKvBP4IfAL4G3kWOTXEEO0LhQKeUpU5bJBxEDCpIkSdIE1QDCe4GFyAEYP0LO8PBncvC2g4DPAleUUn4xNblUv9ot/mxy8Mzlyekhf0oOKLkG8FRykMbzSik/m6p8am71ePsY
[... base64-encoded image/png payloads and notebook "display_data" output JSON for several matplotlib figures omitted; no captions, titles, or axis labels are recoverable from this span ...]
9pnLSoRZ/Yyp0b+s70XScpSJia+D+HCTZS0kK9Gk04LwOdYqroiysfy+fuRtN5bl0qPXrVElkNQuK4j+IOsdvoIRWp6Od0OOZuUwP6tKcWOnv6IDqjlKP5dA9W28Gbso2yUtqLmt1lOxoOnR93d/RFLNl0eB8HtRhfnqQMg5Dy/U1lMzoQjRV6naUffYQtBPbIismsSoJa76AZofMVer498zcouL3Jyn12DtLoqpyFuX9cmbgN2gH/s3UjIhhV5bTMpRLetAMiuvRzmJJdID7GtoB9eTMzhi2kffRoPhDqD3Nhy6DaVmfiDgUTbF7C+3Q7kPtcXK0A/sHmu58cWZe2KY+k6Kg0Q/QrIv3G+ut6TP/RAdDVwxSTqADRzLzsfLal1EA8yZ0LeGXgW+lzqz1RFnes6HEY83LexW0/P5Km+Vdc31GoevQp0Rn8x5EAYTd0KUmp6Nt++nsURLJmtrkBWhA/Tg60D81M8+JiG0YfQvOm1KzqtrVpzmZ4VPoLGxz/7Y1uvzp0sH6tzIjbCmUDOvX5TcuiWZ3bIS2mVMy8w8tihhYXh1994/QQetT6FKLN1B295nRoPZqdCCyXWZeMkg5kWUgF0ru+KHyW7dA6++BzNy14u86DA2uv1T68f9QrmdH1wbPgRJJHlOlvG6V/m0V1BbPQ9vpFOiAajO0jH6fmee2KedX6OzvFGgG3m0oJ9Dn0eyna1DwcC/g+5l5+RDqOj8av91ctQ8pM1ZXQOOIUcDDWWbylFkU56H+YZte7Csj4i5Gz7q6Ha3za1FAZQ104HZvZvZkxklZbx9GY+7b0AHZs3xwvf05NcuzXVljakvrobu+jCp/YwZ0d5aW+6XSb6+Hto1rMvP18tp7Tdvhm2g/Oej+LSIORycuTkSXhlxfXp+31OvfaPs9s93vayozMjMbs6Eb++8q++66RMRp6Iz7fWjW2aWMbkeNYMjVqZmV7cqqpe+uq05NbfIZNI68ErXJHdEJxb+ik797o7vRdNyX9DsHDiqIiKvRdXRXl4OqQ9AZ5RPL+x8YXFcsc1m0g/8b2lm8EhHfQtfbbp9tsqg3lXMYGmzOhnagBw54fxd0zd62ndSvG6Fs3Luijv4ZdPB5Kzoo+RAKGkwD7JKZj/egPkeg2U67RcSSjL79znuZuddIlBUfzOz6Mrqly/3At1HU+UPooO/xHOSODBFxOoqeH44GoZOgQftTqbtXLImun315kDImzMz3yv+bB6FzoJ3rjMDzmfl6xd/2IxR5fQ0FrC5sChyMQoGVZwcbVNcplOX4etS5P40GwfOjHdF30XWNW6A2el2P2mRzluiB28jiaEf2CHBfq/pExMFo0Dk3ymeyONqhTowGIkehdvl2xTo1HzRMiIJjczYFID+GBh6DZvWPiENQgHB1lBxv56b3tim/95+Z+dcq9epWCWRMz5iX92/Q2d0A/sIgy3sY6vUrFPRJNMPsadRn/gSty4PRQcOXxqI2uS+6BOhzoctT5kNByH+ivu3NTvaVEXEU8HZmfisi1kTtfVa0zzy6YhkHoYHnfmiQOD+aFnxmZj5TfveEmflCxfK67rvLttuYZbA0mllwJjoovy4iGrfkXLLVQX9zX93qvfLb/lFlmZff9SeUJPSZiNgfrfe30N0TfhBjyCI/XMoyWgydyd8azTh4lA8uo/fb9W9l/S+C+v/l0cyXSdGB8TXAi5n5dum35s9B7lxUoc77Axdk5s0VP/9TRs/MeRlNcf5T4yRPRJyNxhubDLVOVUXEL9F+cAHUpvdGy+zyzPxRCSQtg9r18z2oT2O9fQUFChZA45vbM/P/ImKyDvZtB6PL+E5BwaOBbenrqM97NXWp0WBlNY+5zxm4XUTEhsCnM/NrFeq1BpqBdRU6I35d+Y0vZualg23jA8rZFng3M09ueu2/xyVl371hZu7erqxuRcQPUf6LTSJiHbSsl0bt5kfR2Z1daum766pTU5t8Cs0EWhLtI3/R3CYj4oDy9zYepLixV/bBtId+fTA6Kcxny/OJyr+fRQOF7YZY7uLo4OksdGZ2z6b3Vmr++23KmQ8N4kBT1s9BZy4OZ3QUdTqa7i/dg2W2GDqj9xCaarUp2lkfis4czYvOYLTNyF5jnVolVjoWnV3oeVmMObPrD1G0stI0wPK3v1va0ndRtPMAdCD8KBUSWNGUdI2mO4ZQpt/RYZZotENtJFXaAh28NqZgNgKVc9BhosUu1//caDbGaU1/fz20s36RHmcIL3UYU5bo/24jFb4/Cl3e9BN0WcIoNJviCBQQuRIdBExRZR2W9ncdo5N77Y+mJh+Pbjc7c9lu2yXpXARNZx2FZnGcQw/vmjKG+kxQ6v7jpuU9FzoArby8h6Fe86Gpo79CfffFaCbTTWhq8CLlMzOMLW2yfG8Tyl00yvOJS7s8ENitw/qMKSHWxGg6/oXAHhXKmATdpeDjTa8thaaoX0cH2cqbvt9V3122jdOAR8vzCdHB7C0oMPPj8pmpGCQBZVk332TAOIHRfXfHCTXRNeMfQWfmrkQD40+iQGTlu7zU0BYb/VvznU4a7egSyl00Krah/dCdpq5DOWNmR5dyPlnKOgiNk2ZhwKV1A8pamQF3SSr9S2N5V77bUPn83JRp9qUd7Yauv/855VKQUqee9AGlDf6p6fkW6GzucWjcs2QP139jvW3T9Nrs6FKFs2hzCdAY2tLpKHh5Rtn2tuy0LZWyxjTmPqZsN9uV9TjodttU1oTls8eiy8I2RQl8G0HWSgma0Zj71abftvaA96PUdZIerbtvoJOCjefTopxIv6GDS3iose+uo05NbXJv4LamZX8jOh48gdL/0+N9d68fTo44uAky81+Z+cfyPAHK828Cm0bEhuWMVicOAI5PRZH3ATaPiCVK2Tc2TTFqF2mcF3g0dJnDR9AZvtPQdK710XXv/0BTIHtlbXTm7q9oSvgX0U7nAjSt5yfAZFnud9wjVzLmxEr7osQ6845AWReiHQSljb2CDtaORlPw2krNEvgDOkvxaXTAODU6+LgE2DIGSRhTzgi+UqLtZOZ7JUqdqdkB0wPfjYiJKv4m0ADtrPL/a9DO/tKImDczM5Rw8WlK8p9eSJ0xvxJYPiI+g87sXIja5kPoVlO9dhW6vIiyPG5DA8Z7UWR9UKmkqpei6bcToYHHCmigeSkKHD2FBrptzzaW9ncDStC3ELrm70A0GJkF3fLqbdTWBrMVSjr4bma+iC572DBK8tiI+Gjpr4ZdaNr9+eh3bIgOgsjMJ9FZglPRGYwdelGfAeZFB4mB+u4Po2vTz0aDye+lLvN4pYd1uoou2mRxH7BFRBwbSqD479T9y08B1g8ljquk7P8GJsT6d2aej3LJrDpY/1bKeAcdJG4dEYuU2VV3ZebmqO/cqGp9mnTVd5dtdyKAiFgL9d0roJkv16Jr21fMzDfQ2f7/UWaSbYmCO/MPKP/90H3gzw5dftSJm1Dg4kDg7My8OzP/jNrC5zosa8ia+rcdI2Ktckbw4czcB20jW5XZa+3KSbRMv4pmwH0IBVOnRidYpkCBxJVTZ9H/MaZyIm
I5lMT6AwlBU0nV3o+Imel8eU8FPBERc6WSDx5fyr8f2DciZit1GvY+oIw5zwCWiIh9ImK6zDwN3f3qDDQN+8whjHWHpGm97RoRW0XElJn5TGYeX+qzTURMU7Gsd1HOgNfRNn8LmsEwFRqjbBUR60fErBWKm5f/HXOfji5/WB/Ntmq53Q6o13vlsyehs9NnoksM70f73LYzFoq1y+/aE+3Dd46IIyNi0fL+eujk4ZAvpe7QbWi7bbSjV1PT9U8HvhQRc1YppOa+u+s6NbXJ3YCHI2JKdEnaEiiAtBpwaEQsnpmP5SCzfMd6Ix256OcHiv4dC8zU9Np/b42Gphh1NOsA7aT+SNMtqtDg7ODy/6VQxtqq5f0YRYUvRdeMN17fjwFJrXq43D6EDlwvQxHig9DB5CloNsJSPaxL14mVhqmsaVGCtgcYcCYH7cxWr1DGxOiMx+rlO1ehA/LFUCbux9t8f010gPl4eXy8vL5803qsdI/d8vkJ0Y5z4OuHlfY45Htc19AO5kM75MfRAH+98vot5fWetcnyd2cp28glNM14QGf1r6pSn7Lud0E71wtKe9ofTaG8BwXvKp8lRFOjr6MMFJte3xk4tmIZjbt3wOgZJueifm/Z0lf16lZQfy7LZ1aUVf41NID8Znl/OnTAXml5D0P9jkKD2ZdpSqKEzl7/fWxqk83rFF3adFBpk7uj4Mgi6LK8yjPNqCmZIToLeAQKzKxMSfxX2uqtdHhmnhr67vLZXdHB6yHADuW12VAyzEFvMVi2qV3R7ejup/TZAz6z4BDbwZroIPZFYNvy2hXokplebyO7lGW0GaMTZDb6t7b3Wm8qZ0MURHwcBZF3LO20kcRs0P6ttOVtyv8XQdPeDwY+2s3yLtvJSaVtnkGZrYPGnh/ttLwalvcqaCbL0Siw/it0qcAbjbbQ4/psiC6V+g7lTHrTeuvoDDoaqx2OgtublmV/PgoWPUnF/RLDMOYuy3xLlER2DpRP4n/GUoN8v9GnTY6CiY0x90md/LYa19uy5Tf9ipJUu2y3D6ITh1XLqa3vrrFOX0JBxO+U5fyZpjZ5GCUB/Lj8cI6DFiJiRbRzPhd1pqdm5k+a3m9cQzgqFdHspOx5gReynHUvZ2F+mLr+5jLghNStQQYro/H3p0MDqYXRQfpPM/OGiPhzqfMpndStG+UauEkz87WImAzteNZHO52L0ZS3f2Tm7D2qTy2JleosKyKmRsvohfL8s6gDehUNID+EBpFLtClnYMKY1dD0tmdQIOMOqiWM+SKaarUUOpCZFOXv2GCw71VRIsTvRcSHUe6OWzLz21Wv26tDKFnYZKVNzowGD2ujM06NM0xzZeaigxQzXPVp3kZeRjvIhVAQsWV9yvWKC6H6X1DKWAsdGDfazVtoJ7vyGAtpXfaaaMe4AbBXZh4fEVegPunUimVMmpn/alr/B5a6romuCf1tJ3UainIm+kQ0ULivaXkfhQaj16Mph22X9zDUbWJ0MD4BGrh8FQ1kb0UHgr9F665X/WQdbfKbaLu6FQ2gnkCD2K+jPu1e4PrMPKJinbpOiBXKIzQzCs7cgJIbr4uubZ8JTX3/c2b+uGKduu67W2y7H0Vnmhvfmws4IHXGd0xlbIgCe58oz/dGy+W7mfliDCHn0oDyGzlgVkeD4n+j5Lh7DLXMDv9+lWV0WWb+qE056zN6n30ZChKuj5JHL4QCLqeiSzFa9m8RsRKaHr01Cqyeh2ZzvYBmixyYo2elVv2NjfHbbGjW2MooqeIfyvt/BY7KzDM6KXcoImJLdKeSW9GMk8mBOdE+ZR3UR/0tMxcZ7rqU+oxpvS1R6rQqygVzVbv1X8r6GBobP8f/tqVl0BhuaZRX7OQWxTTKGrYxd0R8GgWLjsvMQzr43hi39VLHRVBwY6d2v60ObdrRamgfcE3zcdQgZdXSd9dVp6ptspR7Uuo2uOMsBw5aKIGDtTLz0NL57I4a7E8y848R8R3gD9kmc36LspsT0DWmgR+LpgHPlJlDmhIYEbuhBj0b8Epmfn4o5QxVDJ4B9ytosLxNVsiAW0NdakmsNAxldZ3ZNVonjLk5M68NJYwZlRUSGIUSxeyemZ+KiIVLXSZGs2K2Lb+r604iIlYAvp6Z23VbVgd/cwJ0oDiwTb6OdkIbo4DPhb1ok6VOXWWJDl1Wsj06O7U12mHtW8qbD103+x80sPlbZt7WYf2GfNAwyCBmBbTzvzIz1+mkPt0IJWLcA+X8uJvRy/ujaGc/HR1k5a6xXmNqAx8HPoXW4fPoesyxpU3ujIKn+6FlOzmaWXZNZv65TJl9Mku2+Ar16TohVihZ2DfQmfI10YHCV1FeibXQvujdzPxdlTqVMrvquytuu+ugmR2D3bViE5Qc8vZQEtM50FnQt9AMyCEFDaIpSe6A16dDSeOGfbBYcRm9le2z1e+I7tzyG3Qm90U0LfkOdHbw86jNv0yb/i0iNkXtsXEHhNezJD0rfcwcmXnAkH7wmP/eLsBGmblmXWUO8rd2RrkMfoqCKv9GB2c3ZuatoUs0DkVBqVt7UJ9W6+1mNFtsCXTy6Z4KZVVpS6ujmQwfHmJ9hzzmHri/jIilMvOuMb03xLp9GQUYh/TbOvxbVdrRczlIou+msmrpu+uqU9U2iYJ/m2fm2u1+41gv+2DaQ78+aEp2gy5R2BolkLkP7bjr+BuN4M2P0a2T/mfK4SDfGYWmgTaez4Ouu1qRHiafK3/7IEZfn/tllMfhx2hDotTrQ71ab9SQWGkYyvohcFb5/zpowHk0JXkcFabeoYQxfwA+0fRac8KYFSuUsRQ6cG5M/fwWun71THRd3QSUS2fqaN8MMdFiDX/7UOD3TW3yoEabLOu18vS0murTbhsZtD5lvfwFHaTMgC4N+Rq6Vv44RiehrG05o4Prrqc5ost6Ok5EN8S/tXPpA3+Kpia/AJzctLxPKttQT9f/gDYwzYA2sDOjr5ntZeLQrtpk+cyPgM83PV+m9CnHUi5b6aA+XSfEKtvJ7Xww0fCeKCBzEENLGthV39287Ta99oFtt2xr2wDLDFLO7mh2yiQDXp8RTbs+kkES/I2hvJlQsGfSptdGMcSEf122xbbLqGI5gQLfazS9tikK9JxEU4JD2vRv6DKG09DByurostIlmt7fGZ1lrnM5rAQs3qNlfhw6qBpVnjcSxp3A6GnhM/WoLmNab5uMab3V1ZbQQX/H41KGOOYu2/mHaLrMpmxvjcTUlffdTXWYsMX7Q/ptXbSjdZuer41miv23HXWw3mrpu+uoU4s2+YWBbRJdDvktlAtq2Jf3SD9GvAJj2wMNsN5obpAVvjMw6/H/bOjorMGOFcpqZAkf2PFM0KrsHiyT6SgZcJs6s+YMuBuOQJ02QtcyrkXTIAtFoI+ngwFRXWXRZWZXNPXsOBS8uhOdNZ2waZnvh67Zb3ntJ4qM34AOqO9GMykWLJ317UNc1gsP7ND5YMCgp20SXf92HJo6dj2w2YA22cgJ0JMsw03LY8hZopvW8VHorNkxwN7ltVFoyu026ADiewPXRwfrreMs4WiHufHAdd1U5
wmq1Kem5TwLOmM+f1OftBaaYng52uG/BuzcyzZZ6jYRusb2qwPawHaMQD/ZbZts+s5maNr3Jwf81v1Q0qhKB3xN390S5cNZZEB72hNNC69Sxo8pd0Nqem06dBnIJzupT/nukPvupu3gWJQsdPKm9yZq2nYH7SdL236CcocLRt9WbuHyfBEUdJmx4m9aFs2YOBPdgWVgXzAzuh542PvJpmX0fWCvAcvov/1bB+XtgS5VWp3RuTImRAcNBzTKrbC8H0Enis5Gs7imaCyP0p5uB5buoF7bAh8e8NqEdHEnjC6W+eLoMsWn0KWkjRwOE6EAyenocq5eBo/2KOtongHL57/rrYO29PNS/zG2paq/C826WnSQdVZpfIMCoFeUPujIMbzfyb57TMcBzXXq2Torf297NN5arum1Rjs6seoyKt+rpe+uq06lTZ6OgliTjqlNUsPJlbHpMeIVGNseaOrLHzr8zhToftzNDTgYfXvHSrdLLB3PBeig4SaabuNY3u/Zjr7pby6GZkociQ5EtwKmbHr/y2haf0+i1gPqtgs1JFaqqyx0JuFv6Dr75rMeH0OzF+Zs8/1L0G0Xt0SBg8v4YMKY5dAgp+WOBwU69in//x46wD4C5UdoJAwc9JZ7A8pbHiXhG+PfRGe0et0mLyud/ddQzoeX0MB42aY2eTkwTY/b49poOvPAbWRjFNVuW5+yrO9FZztPQmc61kezoc5Hg8G2CS3rWm+lT3qx9En/c3YazYz4ZafbWxfL+Hjg2+X/26IzFVegM4e7l23km1WXd431WrK0wQtRlvQvD2gD2zIC/WQdbbJ8fjuUMOprNA2yUSBz2g7r1HVCLHTL5BspMyeaXt8MHTh0mhCx2757ORTwOxcNaEcx+gBnVZTI8zwG6XvL904s//8wCsqcVtrVTuX1ymeqy/caSRl/jsY2v0P3oAedJOl1gs7lyu8a0zI6v11/1FTO6ijIciM66G+cGZwD9f1z06Z/QwcGjeX6S5pu/4kOHHamg5l5ZZ3dCsza4v1ZSvvoyUwodFD1LdRn/g5dpnQ12qdMUuq6Vo/X/4zorPCe6LKgqQest0rJVdH+7bY229uv27WnpnU2xjPUZZ1VGt+gvn83FOD7DZr1eQhl1ioKAlbZd7c7Duj5mKv83f1QYPQT6PIdSju6lqZ9S4Vyauu766gTulziOdR/X0GZ0dXUJuelh2Ocfng4x0GHyjWFU2Tm6x1851SUQGk6dHb3u5l5RdP73wceyTbJx0rylQvRNVsLoAPZqdBMhSvKtYhzZblOqhdCiR3PR9cPTYYOaG8ASOWHWBh10otmD24HU1dipbrLaipzWTTVaWp0DfDpJUHSVWiK6hjzJETEemiq1nfQNfoTo53Pk6iTnQsNhi7KFgljQrecOQNYJzPfiogH0BTge1Hil1GZuUvV31LKPBfd+/nYiJgHnbFcEiVCvK3XbTKUePSYHJ047Eso+/gr6EzzFpRlj84U9eoWRY36bYgGtS+iZXR56NZpg9YnIiZHM1++is5+Ho4yL9+OMvCvjpJablqxHrWst4j4A2p/86MB52aZ+WBTIqnpSzl3VqlXN0K307oPHVztERHnoGuYZ0fB279n5peqLO9hqNuf0KDjz2jdvYa24Ssz84iImK+835N+ckDdhtomF0Xr/E7UR34K9ZfToQH1u8BrmfnVivXoOiFWyRn0bml7q6DA6BsokPgCOvB/ITO/VaVOY6jfUPruyVEQ5g9o+vQx6ETCY2jbWRL1T79I3bqy1d+fC9g3M7ePiCNRwOcSNHNoc5ThvdJtjiNidjQDYvPMfD0iHkNn1Z5HZ/x/nBUTWdahLKPJ0b51HrSNjEIB0nfQQcMr7fZPJf/FzCgQ8lcUbFoZBUnmROOlh9FvnKdV/xYRC6DlsVpmvhsRa6PkdWdn5n5Nn6ucHDsiTkQJQo+JiJXRNrcx8LPUrfiIiAUHawN1Kdd3/wYF+v6GgiALoinXC5bXns3M7Ye7LqU+jfX2Ejqp9n3Uh1yOAokfRn3JNyqUNTmaUXk+6lOPRv3/rWhm1IdRO/tVhf3b6aiPPrZs/8ui3D/HZeaFodsyzlehnPmBozPzk+X5w2j229Mo38YhjTZQ4ff1zXFA6ZMWQkHvl1A7mg+dsJmxPF6v0o7q6rvrqlNTmzwCtcN5UADir+hyvNdRe/o2MH8vxjj9ou19cO2DUsmDOgkabIIiU9ujM/OnA+dFxAVoQ38J+FHqPr6DlTMdSjh1fWY+hyJgK0bE1sB+EfFOZl5Li/sQD6MF0cDxXjRoPxAl+nollLH/aeDXPQoaNCfD2R2dWdwXncGonFhpGMoamNn1UjSI+WRE/BAtu+NaDTyLVdBUybXR1Os30QHj6ahzXQs4b7BBdWY+FRGfKkGDqYCTs2TwjYhLgf+LiAUy85F2v6l8Zx10hvub5aWfozb6LLpn+0GZeQM9bJOZ+XhEzBIRN6Adz7/R9LSb0EyNB9C09RN7eNA4MCNvoMDTJhFxONpGWtYndKeDvVGSv4nRAdpJKFD3UHn9aXQQUqU+H0fXWe5cXhrSeouIzVH0/Ufl+SFogP+DsvOP1D3uh/0+5ACZ+VwZ3J0aEW+hKbhrooHiCuh+3Q+gA7Zerv+Po33AjKUeS6C+8l10v/Zd0QCkJ/1kqVO3bXJ9FMi8As1cugid2b0JtcvV0AHaxRXr0yoh1qmMToh1ZQ6SECsiVkMzw/4dEW+g5LUfiojtUO6G+9FByN5V6lTK7KrvLtvuj9DMvKnQAcwb5ffNh66PvhVNE//PIPUIFNiZKCJuQ/v/fcsJjCcjYh+0Pi+v+NNeR/k/fh0R7wEvZ+b3yt+6BvhqREw0WJ3q0tS/PY0CRJehuxWsh6aJv14ee7YpZx2Ui+IlFCy4E82keRotm9dRv3dMZr6GAi+tPIESFL5b+rHLy1ju4IjYKsudYaoGDYp7UdsBXXJ2FAoiHRQRy2XmXj0KGnwabbv/QQdBxwE/QW1zRTTueQP4RiMAPMz1aay3e9B6uhhtx8sw+g5ID6DbX7Yra02U22YhdGZ5ErTeJ0Tb2idQIHLQ7a2UNREaY0V56Vh0CddfgGMi4ieZ2QjgDSozH42I/0TECeg44JVGEKTsk1aMiLPaLet+Og5oakfXoeDHGYxuR0ugmRV3oMsC2pVVS99dV52a2uSjaIbKg6V+T6AZBi+itnpeZr6B+prxR/bBtIdx+YGSKO3Z9PyzwA6UhFgdlrUNip5OOuD1HWhz3+dh+m07osyiP0Qd1dnoLNFLaFByARWSPdZUl1oSKw1DWTujs2cbowRtx5b1tXx5fzlg9jZlLIk6rq3L80CDqNfRQHsTdGeAbpbf2sANHX5nGRS42Kf8e0Z5fSoUQNqlx+1xKTRd+m/oIPoddJB4AJqRcSCanrhED+vU2Ea2K23qbJSIci50Znb1dvVBA51DyveWY3RG3w2aPrMrOoCpUqcl0BTnA0tZvxvKekO3MVu5/H9CdFB8LTpz2rP13mJ5H4oGCI3lvVl5ve3yHoY67YsOGrZDg8yHm9rA19FU4bGtTf6RkgMB
nSnep/RFewyhPrUkxEKDy83Rgf4GKLj2f5RcHmjwWPlaVOrpu29Hg91fojO8z6JA2kfL+9ugAEAny2uL0oZuQn3exnTQd6Pp6V8s63vXsr73a3r/a8D5PWyPt6N92IKM7t9ubCyjDsq5rrTjrdH+7Ew04+jips9sBXy/w3KD0cnrNkN5czoa16AZWQuUNrABcFjTezOVttqrSxRuQ/eb/wYK0rxS2uXXyvu7oZMKvVr/jfU2AZplcCY6O7zpENvSHmi2wc/ROPRlNFO043EJCjYcVraRA5teX7qU3zaXAApWbFK2taPLb/1+0/s7opksVeu0DeXW2QNe7+lxQGk7G5X/T1F+298a7ajDsmrpu+uq04A2eQIaR74FPNr0meVK+T3LS9IvjxGvwLj+QFGpRxid6fs2dEZ+wdJBVsnEOj+6lnI5NMh7nnLtXXl/ZzrMu1DD75oPRau/WJ5vjgZHz5WNbjpK8rYe1KW2xEp1llW+01VmV5RH4abS+T1GScSEDtQWQUGaJ+giey4a+N9JZwk/lyjteXU04+BcPphtelfg2B62x9nQYPPZ8ltmRNMut0RnCU9D0eOeZb2lhizRZTs6u/yuW8s2thzauZ6OdqyzoGvJl65Q3mdQwGdCdPBwHiX3QyfrDZ0N/J/BM4rGn4sGMT3doQ6yvC9Dgc2Ne1mf8vd3RGeB1yjPJ2tqkyehg7jT+mAZVWqT5fsToEDWz2hKxIfOqv+VpoPQDurVVUKssg38oen5JOjAeDs0yK6UR2hAmd323Y3keV9DuT7WRwcb25f+aGGGkJW/LP+50EyPv6BreD9W8buzoIOp+cvzidCssavRfns3dHa3J7kNyjL6HeX646bXN2gso4ptenJ0FvEoSiC0vLcGupyjsf3t1O32hgKBH+/g87ujA9nGXXDuQuPANdElK9uiSx6He1lHaTcvlnZzJgpmrIdmZ72G9jWP0rRPGOb6NNbbmgPeWxVNx1+jw7Z0Jbr19/aobz0PBUga29uuVNu/fbKU8+VSv1fQpVvzlve3By6oUM5sKID9m9LOF0WBor+iy9a2RSeFlq5Q1kJoJs2H0ZjwcZoSs9Kj44Cy3iYq29rX+GDC8GXQyYMtOyiv6767rjoNaJNboFwZq6Hg/lfKulqjfLanAdZ+eox4BcbVB023VaGczUXJnr7Z9Pot6FqkwcqZrXQyV5eO52Mo2nkburbyF2ja6zI9/G2zlfo8jK6x3xndXuy2UpfKGXBrrlctiZXqLIsuM7uWDmsfNFPlSXQQcixKkrhS6cieYIgHaWgws2hzu6y4/m8qy+d6dNnG5MBU5f1p0UHu0j1c943ltEdph79Dg/y9ynK6Ck117nWb7CpLNNqR7YsCRz8p7en88lvvRdOgf4imubcra2o0bfd2NJ1znvL6ZOXfGaust1LOM4wOZCxdXm9kG/9sWd6Vb581XMu7LL950EFRT/skNCD6GzprfgKjB52TlzZwKgq09joBXR2Zy+cr/dAXUICuse7nLNteR2dP6TIhVqn/yWXbmKPp9VnRQcNOndSnfLfbvnsWlNvouqb2uByaJXBAedzRbntr8zc6uvUqH0ywuDLqw39TlvG96GBrjR63x33RbJXm9jhJWT6Vg1AoSPfPss2d37TMv4LOGs+M+r4hbW8MIXN6aQMPoWn4Z6Hp1kugKdRnoOunr6RHM45Q0rzfoLO5v0KzQn+BZmhdioIsK/WiLk112rKsr4F3Lmist0ptHI1LnkX7uItQEHuO0lcdgGYb3NZueyvl3I7OJp9d+pA1UEDl92jsdTOwZMXl/d3y/z1Rro3foEDpFeX3faZCObOjMffZ5XcchgLAL6MAxC/L9jvob6t5vX20tKOPZdP2UV4/jupJw2vru2us085Ny/ZAdJefb6HjmxfRLLKeBVj77eHkiMMkIn6BdhL7pq5FIiImzZLLoFxHOlNmfqlNOScCD2XmwRGxAeowVsjMV0PJ+95G1ydWui69Do06oevzfosioBugs9/vAO+V19fPismauqxPLYmV6i6rqcz9UOd4LXBvZj4dEZOgHccnM/PNFt+bE50V+HgqL8HjpW7/QDvAV9AZi5czc/eq9WnxtypfyxgRxwOPZeaBEbEvWk7vogj4Jeis9qyZuUM3daoqmpI+ouXzEApsnIeCIv9CA7Setcmmus2IdjwPozPxz6eSkc3RSX0i4lfo7MdZ6EzvsegM73Uo58GobHONdGZmROyCpmy+hM6A/gVF1XdA15TPPdh6G0M5L5RyrkOR/e0z87WIWD4zb233u+pW1/KuqS7Ho8DeCehMyPQokHE9apufQ2c1Fhub2mREzJCZL0fESoy+Nv0C1D+ugQav61esS63JDEvegZXRpVtHlNcOQNN6h5IQcUh9d/nu9mjG4cPogP0dFByZDu1TpkJJFr/Tab2GKsacYPEKdHC1NnBQL9tiU72+hy4j+GNm7lVe+yGakfn1imWcgKYnr4Pa5Oko2Lof+n33oZwsHbeDoSp1ui0zjyp9+KOZ+dPy3vKojb+fmU/3oC6j0EzKCdGB7/+hccQxKOD3KeCZzNxkuOsyhrrtiA7uz0QJoP+J1ts8mbl1xTJ+hU6uTICuR38djQPeQCcz5kfJXwdNsNgoJzMPiYg9UY6VR9BU9SnRSYp/ZuaDbcoZVX7DqMzcOyLuRfuCW9Bs0VnR5Q/vV/htR5U6/ajk8fkJ6kteQNP8XwDeblenukRJChoRnwX2R/v/X6Gx1lYoCNRRO+q2766zTmV5z4kCDpejGWP3oFkr06K+5Y7MvLqDnzjOcOBgGETEimhAdi4aOJyRmQc3vT8POtA+ITP/OUg5c6AzAVtl5pPltaPQQOqHoayfy2fmJcP3a8ZYp9+hDvEeNBC+DHXWv0Sd4erAg+066JrqM6bESkfwwcRKkwK7DnZgNQxldZ3ZtSSM2QPtsP6FzuJtURLGbIl2hHcAn2tXn7pE6zsz3IWuB3wvM78dHWSbrqle86Cgwdtoilsjs/u1aErZg2hQNOxtstSnqyzRZTvbFP2mh9FgYxM0KF4IzfB5C22DP6naB0TECmjQsRdaVseiHeSOmXlO1YRog5SzZ2b+X5W61Knb5T0M9ZkVDejOQVmXT0YzNSZmdHLdF1AfUOnAqIY6ddsmF0MJyl5EA9+fZ+bvI+IrKNv4jCiA+K3MfL5Cff6bEAsN7i/PzLNL//ZFtPxmBL7Uqk1GxMzogHdGdDD0T9R3fxH1Rxehfe2amfn3dnUqZXbVdzdtu7OhGQZfQ2fVF0ZB1ltK3VZBd1N6r0q9uhURgc7kH41mLT6H7oLyenn/apTYtGqCxW7qMqb+DdQm10BnHz+GLhV5apBy5kVT0adCAcxPoRl4Z6BLAe5H7evzJejTs/1StL4zwzmZWTlBZ011WQidKZ8QBfYfQkn6/ozawpvl9a0y8089qM+8aL29V/72H9HlEoeX+tyJZmZ8vl1QpbSlzdA47TV0Zv8i1M/Oic4ML4G2xWcGW/8VDvYXRv1b1ZMsi6Lx8rNoyv1qTX/nL2h/eX2bMiZE28W7mfnD8tp
h6MTRYsBfMvOYKvXpVlM7eh3tz05GgfDvovX2BtrHbJOZz7Ypq5a+u646NbXJ99Es1XvQ5WCHo23jKTSL5QeZ+YvBftu4zoGDYVACB2ulbkf4MXSN20xogP/HEl29OjPvrlDWEsDjjTMbJUq9ezmAPBddZ9WTTqP8/XXQAcPtaKB5MYoS/xBNudsGddSf78UOOiJuR4PZ21GnsSe6bu9bnUYD6yorqmV2fQFNGW05aIyI+xmdJXoydIZyErQTmRedNVyp6k6sLhExdTlDORWaRta4M8PUaCrd9pn5eA/rM6aszHtRzoCjAcVv0D23e9EmB6tPI0v0O4PVJ5TZ/Dp0oPkPdM3fc+is9cfQwGoPlIOjo9lGoQz2z6EDxvvQ1Ou1UVLDykHIFuV8DGUtv6iTOnWjjuU9THU6FAWw5kbLad5Sp/XQGZBzgO+MRW3yAjRd9v/QTJ7D0WBtm8y8v3ymcib+Nv3bY6jtvzVY/xYR16JB51wo4Pt6+Y0XoQz606OZAo9VrFPXfXfTtjsZGgDPhfISrVL+PykKsu+VmVdVqVfdImILlBvhFRRoXRgdxKzco78/pv7tLnRgvThaRs9UOPj4C2qTL6J1tAqasn0gGvzvha6f/vXw/JJB6zYKzSp9tmmm1ofQGOMPmXly4/Ue1OVStJxuQGdPd0fB50vQdrwg2vZXGO66lPo01tvzQKKcAo31Nh06EfVGZv6jQlmNtjQLSkT4LrpjxAKoLU2OzjzvmJk3VShvsIP9K1GffV2Hv3cWdCD7NrpkeSF0G84VK35/KdQvvolmP2yUmcuFbuO7N7BDL04ejaEd7YwOqHdjdED8/Sqzlurqu+uq04A2ORu6VOZltJ/eCO23pyvlf6NXJ+v6UvbB9RLj4oOmbKvousit0QD7fnSWoWo5MaCcqVGndiBwyQj8roEZcM9CHey1va4TNSRWGqayus7syuAJY34N3E0HyQx7tD46vjPDMLXJRlbmL9Fh0sdhrk+lLNFoJsm1Tc/nRmfTDkFTuc+hXKvX3D90UL9F0UDqVuCI8toCI1XOSC/vHtRpfTTgfAadZe35dlJDm5wOXZ+7/IDXd0Q5eFbusD51JMRaDLiu6fnSKFHr4QxItNhBvbrqu1tsuz9FB+hPMIS7IAxTexhygsUa/vZg/VsnuXbmRCdgGs+nRsGd/VCwZyKGeBeEYfjNXd+ZoYu/3ZhVNF3Ta1OU5XQfo2/J2jYJaU31GWy9/ZgO8jW1aEtbogPSSxg9A/amIS63I0q7HHI5TeXNUH7fPUNZ3ijIuxM6IF6uvLY+cPMIt6M9UTB57g7KqqXvrqtOLdrk59E+7wpgxV4v735+jHgFxqcHmk73Bl0eyKA8B+/TwyRGZcc3WAbcx3pdp/K3a0msVEdZZRnVkm2W1gljZi+vndXL5Vyhvh3fmaGGvzlYm/wIGhD/vE/qUzlLdNnxXU7TXUlK2Wugg/SNy2ttbwU1yN/4HF0GIOosZySXd4/rdBGaAt+T7aTOZcTooOUEA17fGfh2h/XqOiEWGjTeAHx1wO/9AtofrdZBferKyj2mbXcCdJnR0yjg27OD9Iq/vaMEizX8vXb926c7WGenAD8e8Nqi6DLRzcprHd0FoUfLoKd1Km3ulwNemxbNfPkuJXFrj+pSab112ZY2QLO8HqeLoAhdHuyPobyp0eVKXS/vUtZdwKf6oB0dTucnxurqu7uuU5U2ORLLu18fE2C9tAFwaWZe3GU5xwCHZA+nOaa8hQ6sdy/TuRrvXVfqdDOa6tMzmXkA2nFcEhGHltfeQdPfZu9lWWUZ/QedzVsDJXppJJS7A+2gPxYRE1co671UUqDbgRMjYtfy+jMor8Hjnfy24VSuwZsbOL6Gtl1ZmzZ5Dbq93LsR0ZN+rsI2chawQbv6pPKe7AIsEhGHR8TKpeyr0KUgi5XPDXmKe2b+Adg6M/9drvvNkSxniH+7luXdqzqhs4yXoYFRT/LS1LyMzkAH/E+VvAYN76JbVnVSr1b923MosDJPhTKeR4nUlo+InSNisfJ7z0BJHpfuoD619N0ttt33M/MsFJQ4KzO/n5l/qVq34ZYVkrPV/Pfa9W/LVywn0VnqeSPi3IhYt5TzABqHrFI+d0BmXjYsP6ZDJc/ESNTpl8BsEXFXRHyh1OFVNNtg8ezhpYVV11vFslq1pfNQkPNsYLvM/PMQ6/oymkm7XjflNJX3emY+UdPy/ifww+zhZYG0bkePoZM1ldTZd9dRp4ptciSWd19yjoMeKgdYU2RJRtRlWRP0eoff9Le7zoDb5d+vJbFS3WWV8rrK7FpXwpiR0KvrNVv87RFtk3XUp1xDOQ86i/IAyiHyYbQTnRBlrj8M2CQ7vMZyXNdv678f6zTENjkl6h/fR9fXXoiuG/41mj13J7queOvMvLlCHbru38p+dFqUIf+hiPg8Otgcha5tvhplPt8k2yQeG1DukPtub7vt1bWMSvBm2fKdR9G+exXUhiZBwbB90GU4ldf/uCYiJkPbWqLLkh5CAbGD0LI7B+Wk2jwzb+xBfWpbb97eeqfOdlRX311XndyXDI0DB9ZW1JgBt6b61JJYqc6yor7MrrUm+xpX9WGb7Lo+EXESo7Pb/xlNj34aHbisg64z/0tmnjuMP2Ws0G/rvx/rVFObPAUN6l5Ad/FYBjgtM08N3YXmHXSHgXsq1qnr/i0ifo0SD66GckecjGY9zIMyp8+KZvadWrFOXffd3nbbq2sZRcRv0fpfEOXXeBbdVvAOFOSaCLgze3B3iH5Wtt1J0MH1g+XfCzLzvIjYDB2oPZ2ZN/SoPrWtN29vvVNnO6qr766rTu5LhsaBA2urzgy4NdRlfuCUHJ3pdm40uFsVeDYzfz5CZXWd2TV0u7PjM3PV8nxpNNthPpS45Y9V6zOu66c2WUd9ImIR4NzMXDR0b/utUFbw54Dj+m12yUjrt/Xfj3WqoU3OgQZxi5fns6CD+03RoOyADuvTdf8WusvQWZm5WERMju4ssgpwDcpS/k6nM5+67bu97bZX1zIK3eLwwsxcpDz/FLACCvj80WeXJXR74kualtMyKOi3PHB9Zp7e4/rUtt68vfVOne2orr67rjq5L+lC9kGiBT/690GNGXBrqk8tiZXqLIv6MrvWljBmXH70YZvsuj4o2/rV6HrTxmuzo+mW99LD5FX9/ui39d+PdaqpTU6Akipu2fTaRGiAdi6wdod16rp/K33zJcDMTa/NB/wezRKYdQh16qrv9rZbaTnXsoxKu74S+EjTazOg69zvbS5/fH6gRNwXABs0vTYl8NmyDa7Y4/rUtt68vY2d7aiuvruuOrkvGfrDyRGtnaeBJyLix6DkLmijOhPd0/rzvaxM1pRYqc6yUolerkFn8prLPg5FwT/VQTl1JYwZl/VVm6yjPpl5F8qn8eWIWC0ipsjMZzJzdzQV80PDVvuxT7+t/36sUx1t8v3y+Z0j4uiImD8z/5OZtwJXoVwEldXRv5W++RZgi4iYq+QleCwzP19en38Ideqq7/a2215dyyiVZ+h0YLuI2DwiZsjMlzPzSOA8dO
Z5vJeZb6Bt6usRsVdEzJ6Zb6Zm9VyArg/vZX1qW2/e3nqnznZUV99dV53clwydAwc2qMxMasqA242IGBURC0TEUui+2seg+2J/LSJ+G8qmuhM6e9SzspoMObNrREwYETNExEKpbNuXArMBX4mIgyLik8C2KAP5eK9f2mSd9QklDfoZujZvM2DTiPhEmYr5KXSdudF/678f69RtfSJizoiYOpVBejPg78DxJcC6ErplYaU76NTZv0VEAEejKaX7oTsdLFneXhtdgtGprrJye9ttr45lVPbbE2bmseia9mVRUGvriJgW2Ah4bZh+wlgjIqYuB2VnoHwmkwMHlYOseYEt0LinV/Wpdb15e+uNuttRHX13XXVyX9Id5ziwMYo+yzYaNSbDqausqC+za63JvsZVfdgmu65PRGyPbmW5Uvn8GcBa6IBlbjSV+vbM3GVYf8xYoN/Wfz/WqaY2+T3UNy6OEio+gZJFvQ7sju6E8GBmHlWxTl33bxHxRWAOlMTq8sw8MyK2Aj4D/Lu891hmblOxTl333d5226trGYVu17kQmu58PAo+zVle+xgwDXBDZu4zHL9jbBER30FT+VdDy+l2tL1OhQ6qJgJuzsxDelSfXalpvXl7650621FdfXdddXJf0j0HDmyMoo+yjUaNyXBqLqvrzK4xDMm+xlX91CbrqE8o+dy1wPboAGY/dEB1QGZeUT4zI0pg986w/pixQL+t/36sUw1tcmZ0V5l50AH9WsDCqF2elpkPlDM+71asT9f9W0Q0rkX9Ebo7xB7orOJ25XfNinIlvJqZb1asV1d9t7fd9upaRhExG3AT8HF0K7dvorsgnV3OPBIRUwP/zMz3hu0H9bmImBVt60ugGT1fQAfT96Bt95WImCwz3+5RfWpbb97eeqfOdlRX311XndyX1CT7INGCH/31ABZAZ5Qazz8F7Av8BFh1BOpTWzKcuspCA+vmZbQM8GV0G8cvdlCfNagx2de4+ujDNtl1fYAtgT8MeG1z4EngCGCSkV7u/fLot/Xfj3WqqU3Oh64TbU4WuDiwf+mnZuqwTl33b2hwd/qA17ZBB/y7ABOU16Jinbruu73t9m4ZlXZ8wYDXPg3ciM46TzHSv7UfHmiW0MXAqKbXVgVOAS4Epu5xfWpbb97exs52VFffXVed3JfU83COAxuTd4BnIuIjAKnrXH+FblF1bEQs3svKZI3JcGos6xXg4YjYoJR7B0qoeAXwzYhYsWJ9rqLGZF/jsL5qkzXV51zglYj4bzKfzPw/YEl0BnTG2ms99uq39d+Pdeq6Ppn5GBrU/Twi1iqv3ZeZ+6M8B2t1UqGa+rffA2+Hbp/bKPcEYBMU1JiivFZ1VlYdfbe33fbqWkaXona9XTmrTGZemJkrAW+hQNR4LzNvRpcn7R8RHyqvXZeZWwIvoUuYeqnO9ebtrUdqbke19N011sl9SQ0cOLD/kX2WbTRqTIZTV1lZU2bXiGFJ9jXO6bc2WUd9UpnuLwEOjIg9ImLKiJg8lZhtqSpljC/6bf33Y53qqk8Jot4JfCoidm0EItDlD1N1Uqea+rfngYeBKyNih6Z63oVmC6zaSZ3q6Lu97bZX1zJKXRbzG7R/3iEilixT10GX0SxVf+3HWvugW69+MSI2aBxkAfPS44OiOtebt7eeq6sd1dl3d10n9yX1cI4D+4CIGIUCgO9FxFdRwpC3UIbpc9H00u9l5h96UJfakuHUXNbUwFuZ+W5ErAOsDsyFztT9Dk2p+l5m/n6QMmpN9jUu66c2WUd9yoHTtOiewdejRJ0/K6/dgKZSz5uZqw/vLxk79Nv678c61dAm10UDudWBI1Gy2GXR5Q9routQXyozBarUp+v+LSLmQ4mq3gUeKb/pOJS88AQ00FspM1euUqdSZld9t7fd9upaRqG7d8yC1vN5wIvAd8v3X0fXR0+emT29tWC/iYg10MydD6M7RD2DksUtiLazqYC3M3O9HtWntvXm7a136mxHdfXdddXJfUm9HDiw/4o+yjYaNSbDqbmsrjO7xjAk+xpX9VObrKM+ETEJcB1wH9p5rY4OVg5A7WoxtCO7PzN7dsusftVv678f61RDm5wYTQPdDQ2uNgXuBk4DbsvMf4WSyj6Vmf+sUJ+u+7eynVwCvI0ukZgZXZt6bESshwaSj6Fg713t6lTK7Krv9rbbXl3LqJRzG7p++V/ouug/o2ump0PB/omARzLzyeH6Pf2ubLsPoPwckwFfR8v7h2gK97ToAOuJzHypB/Wpbb15e+udOttRXX13XXVyXzIMsg8SLfgx8g+UqfRJYBF0Vv40dGb+C02fmRqYsEf1qS0ZTl1loQHvM8D0KBfCD9G0p52B6ctnJqtQTq3JvsbVRx+2ya7rg6LcZ5b/T4mm2f0G+Auw/Egv83569Nv678c61dQm1wP+1PR8AhREuAP4/hDq1HX/BhwInFL+vwAKgByNbuU4yxDq1HXf7W230nKuZRmV9fLHpuejSjmPAxuN9O/slwe6K9R5Tc8nRAG7F4AdRqA+ta03b29jZzuqq++uq07uS+p/OMeBNSwD3JWZD2bmjZm5BXAisHtEnBFKIvh69u4WJXUmw6mrrLnQrcpez8x7M3M/1BmuCJwaEVNntVsd1Z3sa1y1DP3VJuuoz1/QNHBS11U/DuyAsgN/YXirP9ZZhv5a//1Ypzrqcz3wTkR8MSKmycz3M/Nw1B+tEBELdVinOvq3m4AnyuceQbdvPBSdqfpqh/WBevpub7vt1bWMLgNeiIjpSlnvZubX0FnHT4ZyEpkSer4WEStGRGTme5n5HWBdYNUyk7KX6lxv3t56p852VFffXVed3JfUzIEDa+irbKNZYzKcusrK+jK71prsaxzWV22ypvo8CiweEddFxNKljPdQsraVImK1Yar72Kjf1n8/1qnr+mTmK+i60/WADUJ3P5g6Mx8C3gdW6bBOdfRv96JB3VkRsUgZND6BzhStHRFLdFKhmvpub7vt1bWMngTeA86PiOUbL2bmxcCiVEw+PB54Gk3D/j7wCdAU78y8DU0RX6PH9alzvXl7650621FdfXdddXJfUjPnOLD/iogV0PSwO4E/Aq9k5tMRcS3wy8w8vQd1qC0ZznAk1omI6dE9zd9A9359JDPvjYirgRMy86RBvlt7sq9xXT+0yTrqExGTov727fJ8BxTxvhU4GAWwDgSW6PEZ9L7Wb+u/H+vURZucE13fvxC6dnRJYC/gVTQddCqUGHHZVDbqdvXoun8L3fVmgsz8T0RMhbaRNcpvOwxdknEisFin28lQ+25vu+3VtYwiYhqU/4LMfDUivgz8GJ01PA6t/52ApcbXZQ0QETOh67JnQjNpPoWu/b8b+CswOzorv0yVbbeG+tS23ry99U6d7aiuvruuOrkvGT4OHIznoo+yjUaNyXBqLmsNuszsGsOQ7Gtc1U9tsq76RMQv0NnWe9FB2QVofe+D2uIDwF8z88xh+yFjiX5b//1Yp5ra5DXorN4EwEeBgzLzN6FLuuZAlxTcmpk3VahPXQmxfljqcjsaKD4EvAN8ubx+C3BFZ
v62XZ1KeWvQfd/tbbeNupZRRJyO2s4T6ODhaOBmYG/Utp8Hrs/M84flh4wlIuIitF28htr3YcA5wNYo2du/UaLPy3tUn9rWm7e33qmzHdXVd9dVJ/clw8eBg/FY9Fm20Yj4Ljq7tWlETIlyD3wXTSfaPTNv7XVZUV9m1wOBeTJzy4hYAGU93wyYHPh2Zj5f9beNy/qwTXZdn4jYC0Xev4Kmac+L2uFtmXl0REyXmf8Y3l8ydui39d+PdaqpTX4F2DwzG1NAlwV+jc7QbJuZd3dYp677t4jYG20nu6CzTLOgSyVuyMxzI2JWdEvISmdQ6+i7ve22V9cyiog9gHWAzwFLoDOCnwX+hu6ENEFm/ms4fsPYJCJ2BDbMzE9ExCzA0sB3gJeBXTLz2R7Xp7b15u2td+psR3X13XXVyX3J8HKOg/Hb9sDfMvNbmbkvisL9B7gHZa29NjOv6tUAnXqT4dRV1mbAPZl5eGYejK4Z/gcauG+Tmc9l5q2DBQ2KupN9jav6rU3WUZ93gN9n5rOp+8OfiiLoS0fE+h4IfUC/rf9+rFMd9XkYeCwiRgFk5u2pvAgnA98o0047UUf/Njm6ZOD+zDwM3TLxGWDjiFi59LWdTLuuo+/2ttteXcvoJeDSzHwrNcvlbGBfFMz6sgf6//UkmrZPZj6fmZcA66O7oIzEWKLO9ebtrXfqbEd19d111cl9yXDKPri1gx8j80DTvn4DTDfg9XXRtJ6JelyfWdGZs+uApZtenxgFAlbrdVnozNkpKPt2NL2+HNqpzVixnAXQNKmzgEWaXp8N3fN8iZFuD/3w6MM22XV90HXiT6Dr6UY1vf55lNxutpFe7v3y6Lf13491qqlNTgecjq71nKS81rhN4gXAGh3Wqev+DdgQuB+dJZqg6fUd0cHDVB3Wqeu+29tupeVcyzIq6+UhdB37RE2vrwxcDiw00r+1Hx4oAHY9uvRm4qbX5yjLu6e3KqxzvXl7GzvbUV19d111cl8yvA/POBi/9UW20YiYNCImS0UlP4kGdadExImhW4Gtj65VuqGXZRVdZXaNiAkjYqLUWbi10Jm5wyLi4FAW9PlRp3h/xfqM6/qiTdZZn8y8EvgSuh79JxHxifLWBSjRz1t1V3os1m/rvx/rNOT6NM0w+AewLUpk+ExE7A7MHspUvhAV+6M6+7fMPBfloFkN2DmU9BHg2FJOp9dVdp2V29tue3Uto7JePglMD5wZEZuVtx5CbejlOus9tomIRqK3x1B7nhC4OSK+UT4yJbpM6e+9rFed683b2/AbjnbUbd9dd53clwwv5zgYD/VbttE6k+HUmKSprsyutSb7Glf1YZvsuj4RsS7wcXQw9RDKs7EgOmBZCHgKuC8z9xrO3zI26Lf13491qqlNHoAG5c+iaftHobvO/AQlnWoknjqiYp267t8iYhV0JuieUsaaqK+eBQVCGklsv12xTl333d5226trGUXE5ijYdQs62/w8OkjYAQ38/w7cm7osZ7wVETujW6Pej5bt5WhGz89QAPAF4I7UZTm9qE9t683bW+/U2Y7q6rvrqpP7kt5w4GA81E/ZRutMhlNzWV1ndq072de4rJ/aZB31iYitgN3RNeNroJ3hLejWRA+W1x7OzAeH83eMLfpt/fdjnWpok18EdkVJFGdCiac+ApydmcdFxOyZ+UwH9em6f4uILwHfQtegfh7NEPg/dLcHgBWApzPzmg7q1VXf7W23vbqWUURsjdrPUehuR++h9XZVZl4dEasDD2Tmi8PzS8YOZXl/EyX2nA/1A7MAf8zMCyJiCbS8e3Ltdp3rzdtb79TZjurqu+uqk/uS3nHgYDzTb9lGI2I34M3MPLY8nwV1OOuj23lVHpTXVVbUl9n1QJSc63fl+cJo+tSK6P7qVS+XGKf1YZvsuj4RcSJwVmZeWJ5vjA6uXgP2ysx/D98vGLv02/rvxzrV1CZ3A8jMw0PJD6cqZW0GXJ6Zf+ywTl33bxFxTvnspeX5DsAmwE2Z+Z1O6lO+33Xf7W23vbqWUUQcDlyTmb8v05U/AnwMHTwcmJkvDMsPGMtExP7A45l5UkRMgM7Gr4jO0h6Xmbf3uD61rTdvb71TZzuqq++uq07uS3rHOQ7GP/2WbfQOYJ+I2CkiRqUyqV6AEqF8MyJmG4Gy6srsejPw/Yj4XERMkJl/y8xfoPwKe0XEVB2UNS7rtzZZR33uAnaJiEUBMvNsdAAzG3B0dJ61flzWb+u/H+tUR31uAr4aEV/IzPcy81XUz/0F2K7Dvha66N/KwA50icNnG387M3+FAhmLR8QPOqwP1NN334m33TFqWm919W93AvuFMq9nZv4V+GV579cRMVmN1R+b3Q58LyI+mZnvZ+bfgHPR7KODImK6HtenzvXmfWXv3I7GyOsOtR0NQ99dV9t2X9Ir2QcZGv3o3YM+zDYKrA78HDgc+ER5bVKUp2CaXpdFvdlmv4iu09oFWKG8NjHaWU450u2hHx791ibrqg9wCLBXaZMzlNcmLG1o6pFe7v3y6Lf13491qrFNfrZ8/mia7nSADqznHUK9uurf0NmgY1CixrmAycvrswHnNZ53UJ+u++6yjR6ADl4+4m33A8umMUt1AjS1uLGMph/qMgJ2K21oC2COptf/2lj24+uDD2ao3xIle/4usGDT6zcxAncbAPZAlwF1vd68r+zpevsScBLwvW7aUbd993C0bfclvXn4UoXxUETMD3wbXUd0Rmb+LiJmAK4FVs3MV3pQh9qS4dRVVkRElg2inC07Al3q8OtUjoRFUIf4kRzkOqmoOdnX+KAf2mQd9QklsJsfeBiYDO3IpkXX/U2ErrGeMTM3HvYfMRbpt/Xfj3Xqok3Oh3K+XJWZT0fEAsDm6LrUR4H/AO9n5hcr1qPr/i0i5kSXSVxTvvttlDH94vKR1dBAb/OKdeq6746IpYDHMvONiFgaDT6nRMtoEjQ4Hq+33XIpyO2ZeV1ELIeSTE6L+rvK/VtErIXu8/4QyrexIbr8ZkJgFFre02Xm54fpp4wVQsmer87MsyJichSk+SjwIeBfaNudvFdtsmxbC6P1/Q7KG7IQWvcTopM0ldab95W9E7qj2CrAjegOBwujJOLLA2+jBISV2lFdfXddbdt9Se85cDCeiD7KNho1JsOpuayuM7vGMCT7Glf1U5usoz4RsSpqKw+ga72PAE5EA6sly+Mp4KjMfHM4f8vYoN/Wfz/WqYY2uQqaffUkus3Vfpl5ZOi2jNMCG6OA672Z+U6F+nTdv0XEysCRwCPobOJnyrWt66FB5/Ro4Pi9zHyjXZ1KmV313WWa9H3o7jvnZ+bd5fXPosHoEmhwOt5uu6HkYkegmXwvN72+Klo+S6N2NugyKoGrh9BZ5huBazPz5Yj4KDrw/DiaIXhWZo63t98ry/UoYK3UXVQmQstnTjTjYyPUL1ydmW/3oD4fBg5FfdC86IDxWmAlYBoUQLyfCuvN+8reiYg10CzcO9CMrL+gywOeQP3s59HlZW3bUV19d11t233JyHDgYDwQfZZtNGpMhlNXWVFfZtdzqDHZ17iq
D9tk1/WJiAuBM1NJfhZAt7ibF9g3lWvDin5b//1Yp5ra5GXASZl5ajlDvDewWQ7xTi519G8R8Wfgd5l5YkT8Cs1cWBs4JjP/PIQ6dd13R8S0QONuDO+juzBcjIIRpw11eY1LIuIE4C+ZeXJErIkOFNcDDs3Mszos69zy31fRGecbUUK0gxozR8Z3EXEI8FRm/jIi1kdBvmWAM4ATMvO5HtfnchQk/F2py9KZuduAz0SV9ed9Ze9ExG+BP2fmaRExO8pDsCQKpB7ZYVm19N11tm33Jb3n5Ijjh2XQRnQCmg52DhocbRwRM2fmX3s5QKfeZDh311TW/Ci6fR7wCzSD4Srg0xGxbGbe02bg2UgYcxv1JvsaVy1Df7XJIdcnZDJ0Jua9UGLORzLzcyiB3aHlzLGNtgz9tf77sU5d1accMP+jBA0moOQdALYp769aggmd6Kp/K9/7F9C4g8N6wHTA+cAxEbFnh/WBLvvu8jteRdvqkcDOwLLA1cA2mfluh/ukcdV9wKJlX/czdNBwKPCDiDi8tLGqfgxcAvwAzQg5BvgMsEA4iVnDzcC6ETE9Ort/DLAdCo4dFhGT9Koi5QzufzLzuDID4FJglfI6EbFk6Haugx6oeV/ZW2Vm2aPAJyNixsx8JjMPQ0kDN4mII6tutzX33TdRX9t2X9JjDhyMH/oq22hmHo4GoBtFxOoRMUNmvoSStswNTNFBWYehLNobRcRHuiirq8yuTTvMU8q/n4qIuSJi8sx8HtgeWDZ0Ldd4qynAchd91CbpYhspn38bOB1YC13n3Tj7cgE66FvJBx8f0Fd9UjHOtMniETQIm7j0ae+iA+vVy/u/REkDO3Ecum50vSH2by+iKcgnRsTvgGcy86DMPAnYAFg4IibusE5d9d0RMar0S48A30A5Gn6CZnj8JyJORZd1jO/ORgcKG6Pbd55ZZhqsBsyApgZX9SA6UzkT8Cd0PfK9aEbMDHVWemyVmX9AU/m/gW5Xd21m3pyZX0XrYcYeVucBdHA4UURMmJlPo5wh65b3f03Z7w3G+8reKn3+4ehs/Oci4kMlWHMLukRkNqDqmPQF1Hf/ttu+O3Xr3/vpom03BTzcl/SYL1UYT4Tu4T0nOmC/qnT8RMRfgY2y6ZrFYaxDbclwQsmuVgIuQGfRdkTX2XWapGmCzHy//H9L1Jk2rol6uLx+E7o3eMt7gEfEbI33I2Id4GtoWvFl5SMdJfsaVw1Y3rug9TRibXJA3fZAB1K3DqU+ZUe2O7AVcBbwW+BNlEBqncz81DBWf6zT7fIexjrNTv+0ySH126V/fBz4dyOoWQbjk6HrXf8OLJCZW1asxyRoYPYS2mb3R2d3Liofqdy/RcSUaBu5Efh8Zu5dXt8ZWLOcfaxSp1r67gFlNhI/fryUc1JEbJGZp1X5/riqHNhlROyEbmvZ6OtuQUkkP5OZ67YpYxJg7sx8qDxfECVXXBM4PTNPiIhPZeZFg5UzrouILwLPZeYVoUtCPoeCfReiM6tLAp/NzLV6VJ9Po9kmTzTPKAglE90eBTjXycxNOyjT+8phFhHroXV2TyiB+IbAP9Es3edREsLPZeZqHZQ5Cco/8AiwaZYEuFX77oiYBc2C+3eZrbIJ8DF0wN9R246IyUoQiohYGNgU5TRzXzLMHDgYh0UfZRuNGpPhlLJ+jjqvJTNz8fL6aqgzrJSkqXyn68yuEbEk2nl+Bzg6M98sO8avoLs7TFvKqZzsa1wVEd8HbszMiyNiHpSPYhE0EO15BtzoMkt0mWq3OYra35WZfw5NEd+3lPEWMA+wXVZI0Dmu63Z5D1OdVkLXsp8NvIIGsyPZJrvqtyNibuAEdFD+2oD3JkAHIOsAizYO4trUZ0V0G8jH0RT+kzLzFxHxGdRfTgv8m0H6t9C1tUcDPy0zJyj97U/RQPR4dCb7s5n5QLs6le931XdHxBxoWeyRmZeV12ZFBzFzZeZiVeoxLouImVCgAOD6zLyyDNJ3Q7kknkaZ1XcYrC1FxFxo/SfK4r41CjwdBayYmcsO368Ye4Qy1t8E/CEzdyqvzYr2k3Ogtv0ScHhmPtqyoPrrc0Jm7lNeawSRpkCZ9edEd3UZtC/xvrJ3ImJeFJD5UmNZloDtZ9GYdFXgMeBHmflIm7JmBL6Mtt03UD9wd0T8FN2O9wSUYHHQvruM944GNsnMf5bXZkBB2kWBxdGshkHbdum3f4CCIAui/ArnRsSvgZXclww/Bw7GUdFn2UajxmQ4EXEBiiqeFhE/Q2f2P4U6nDM7KKeuzK4LA39AA6HJ0DI/DZ0luH6w745PIuIjKODz8Sy3jisHMkujQeja9LZNdp0lOiLORNMA3wWmAo7NzKvLe4uiA9FITecer9WxvIehTquh6fsPoLsOLAz8A7XJWeh9m+y63w4lw7onM39cDjpmQTMCLszMx0NZthdP5SeoUqcrUFK0M9F6OwoFNL7aCAJUKOMrjA72/g3YPTOfKu9tXMq7OTWFtkp5Xffd5Szc2SjocT+wVWY+Ug5wMjP/ERETZeZ/qtRpXBSakvwKOmiYFE3/fRudHZ4V9Xvvpy4PHKycU9DMwsOB76N8Haui/vKl8pnxelkDRMRJ6IBoJhSQ27Kx74iISTLznYiYNNvk7Ki5PqBrzx8HvpmZrzUFD/YBJsvM71Uoy/vKHomIY9AlAAeHbsW4Mjqzf0Rm3lM+U6kdRcR5KJD9HAr2rwZcj4LJ66PLHW5t13dHxMnA3zLzoBKMmAbtT25pBLir1Km0oyfQGHtp4AA0a/ErqVw17kuGW2b6MY4+0HWe56JkUfuigfE+lIBRj+oQ6GD6RGBLYFTTe+uhaVObd1DeDCiS+uFS7lMoqcrn0eD/SGDCimUdAuxY/r9+WU53osHRrB3+zvXRWcuV0cHQq8ClI90G+umBDtC2K/9fFx1I3AN8bYTqc3lpO1OiiPrhY/hMy20Fnd28pen5zuhAZIp23x0fH90u72Gq03nAF8r/D0Izoe4CvjGCy2nI/TaaEfAM8OHy/BQ0wDqx/K5PdLKcgalRwGCVAa9/HrgC+EjFcgLYC903fF906cW16GzRUJZRLX13aYcLlrq9hoIafxypdd9PDzTt9/qm57eiRGi/RmcOZ6tYzhzozPTU5fmDaIbJb1A+iW1H+rf2wwMFBa9ten4s8O2m5xP0uD6faNQHjbtOADYew+fa1sv7yp6utwlLH7tLef4X4HsoaPcU8K0OypoLBQUaz2dAdyz4KXBgB+WsiAKHc5Xnx6Pg4SloH7xsxXJmAK4EPjTg9R8B1zX2e34M78PJEcdtI55tNOVtdMZqDbpMhpO6pvcm1BH+Fngylen392iq6sxUT9LUdWbXGJ2g5TngQHSWcFt0Vma+iDijTOkzDew/WqYV/xANQndDWeJPDGUA7omoJ0v0gugAj4gINNCbFU25A9g/IhZv8d3xSplt8u8ul3fddZoKHSxOUKZkb4sOcBpt8uRyJrvXuum3J0Kznz4REWegg7stMvMrqH0u2UlFMvN1lMdgp2hKMlj62zPQJQ+DCiXjShRM3TUzD0D991L
AiqHbKTa2oarqysr9NvCTzDwUHdzMAqweyr/SaZ3GNQuhQHzjuvtAAaNfo4DSqhXLeZbRiT5/jvqBbTPza2hmyKLj+XJuWBwdkDXa3enAZyJiV4As+Tx6aEXK+kcBnqtQstGtB3yuSp+9EDpQ9L5ymGXme2hstWiZXXZ/KonhbigYtGBUT0D7NHBPKNdOY/x9KzrgXyoiNqhYzkxoptznI+JINDNsY2BPFEhcpkoh5e+fiZKfN7/+HbTf+3DF+lgXHDgYB0XEBKVzvpf+yTb6F5R34PSI2BeYu1zftBSwSOns2irf+Smapvod4JrQdXigREtTZbl+qp1UZteHgB0YetbiKUpZN6OzMDugyzD2yswFUcS+Un3GA6ejaeBboij2peWxDhqITt3DunSdJTp1m7yjy/8zNcXuGnSAtzq6JOO+YfsFY5dngeO6Wd51S12PfwEawPwKeCQzz87My9EB8XT0sE1GDVmiM/MmFMB8GgUd9mt6+59oVhQdBmhOQdOUH46I5inJUzJ64D9Ynd4t//4aeD4i5kOBkN+ggeOandap9N1/Ywh9d0TM0ljWmXkGcFNErICuu/8ZCnB/rNM6jWsy8//bO+8wu6rq/X8WIST0EkLvvQQMCSAiRepPUBAQAZUSmjSBUAJIEaQ3I1/A0BEkNKVJCS30XqQ3EekiSAuhiob398e7L3MyzNx77pR7JzP7fZ7zzJy27jr7nLPO3muv9a5zJF2WVu/HkTlfSnoCO27WLCnnK2wnA4c431TYPQhYsS+3cwWSTk3PdeV7cgd+538SESPKTq50oT5HK6V+SvqvpD8C+yV9Vi8cV/PeSTqP/K3sdkTEYhExE7aNn2Mn9JJhnhpwasDykr4sIy+9u2NwRYaLwuVtJ0l6EkecrVtSzg04ymRaYBhwlKT/yWkpL1PClqRrmx64GVgxIp4O8wFV8C7+rmR0N7orlCEvzVloFaZPSxjmeFyTGmCDBukyL55pWL+wbQVgLJ6pHQvcgx0HtWTNhz2NFwO/Lmz/JU53OC39rSkrnTdHQZ/TcRjvsbjTuCdwe43z58Fe8/+jJfx+rtTOTzX7OehJC05JWS79vzUeMD6FGZmXwhUxbmugPhviD2i02r58ehZ2xnwc7Z1f4WVYprCtf/q7Nh6MPoRZhpve/j1l6Wh7d6M+A9LfBfFM88m4AzMDHpDe2GB9pi38vwROT+iw3U42amBh/QHMCVDm3NnxoH7f9J7Omd6ZG5OcM5NtX7qKjP64IzdDYdsm6fzXSSHOlXenjuuqnLcsLin5TFnbjYklb8GEfJVta2COgzfr1aU3Lsm+DS3atzaOuQ9Yp4SclYAhuBJHxUaOTfdg8/QdKPVM9tYlPZMjsYOw8mz3q9hLzDx/EzBbg/SZGhOybgDMUtGnsO8APBE0QwlZ0+CqG1MVtlWeg7WwYzR/K7vuOboHV5GpbNsEh/ZfgPmF7ivT1gXbXUkvGoS/R3fgCaCNMLni/6shpx/mRBiOxyJTAbMVnqcoaUsq17ZJYduuyWZfiXkOnscOqKbfi96+ZHLEXoQw2+iDOPfz2rRtRjzAXUzSig3Wp0g+9TdMPvX3iBiInQpf4LDFd0vIGotn0a7DzLxn4FC6K4BFcSf5FdXwWofZvY/Gs3GfSNo3EbWshTuiS1OO2fXSdNzzeAB0Au5YfwS8IOmLTNDyNSvzQ5iV+bC0bQFccWJ2zLA8A2Zkf6lB+nSKJToiTsLP3LrAaElHFPbNgJ/1v0oqG8bXaxER0yjNbhRmeSsl9Opi5e5CnSqszJ+n3z5H0rhkr/bHdmZZ4OdqALt3dAFLdESMApB0Ujv79wSGStqxpE5FQqwvMdHrrZgDYgjwFTBR0utVZByDK1RcgUtJVtJ6dgOek3RXSmP4X0md5sROxomYkOt8bH83xfdrGTzr1K7tjogTkk4fYYLGcWn7Bul67otEQldGp96IZN8WwVE3k9m3tP9oYAnVKL8XZl1fAA8OJuDn+zJJD0bEKcAknPd+aVdfw5SEiDgRD8J3r3LMRpKua5A+o7FDf27gGkm/KdrxdMzWksaWkHUCdhqMamPfrNjpl7+VXYD03v4ID85PkHROYd8G2JZLjhiqJas9270QdvQLE/Be1q4QH38K/qbNBDyObcENkm5O+w/DDsqqZXxbXduJks4u7Nse98U/kXRXrWvL6Dyy46AXISK+hcOHJuDwnwMlPRYuhfSxEvt0IwezETECDw42B36FGdWfk7RJHTIWAi6V9J20/hyeqfgEh97uIunekrIuwmHT43Apmd9jJ8YjFQdG1GB2DVdRGCtp5bT+JA7lfQ9HHRwhpy70ecTkrMyvY1bmD9O+OfGzOo0aVKYyOskSHWZ/vkTSsNTxGYOZ4j/CneA7ImIV7MTq8+zQEXEdZgffUdKLadvUwKTU3gfj9KJfNVCnIivzUMy38RyOhpk16fuupFeboM+3qJMlOiKWxM65B3EH7bcyB0HxmOlwB75qedp07Px4wDA8rQ/CjrItsbPlMJXoOIRLrR2OSTGHYIfMh/B12kJdSO30Bg5VXRPfryuBIwvt1K7tTu/uhZK+HRE/x9+Og+TyuaUdGL0ZtewbznNfFviXnG/cnpwFgFskLZUmCnbDM5h/Ay6S9EBETKXG5+33KETE0sB5klZN6z/G5ar/hSNnGjoQSmlEN0haJszv8mfSoAw76S4tY0OSrKVw6dZV0vq6ODXtLVwV5o1wKdxX87eyc0jP0QXJtq2N066OqXxzOyCvaLuXw323z4APJY0t8+6mSboHJS0Q5pwZlpalcbWbP6VJu0mVPmEd13ZsI5z6GW0jcxz0Ish5R/tjpuirgcsi4kU8ozYhHdMQp0G0EN39h8nJp+bA5FMj6xD3HvB2RPw6zXZ8JWkrSTvh8OK1Sur0bWBRSQdIuhOHlR+KP9R3J68s1ZwGCR8Cn0fEyIg4CIfe/SjJehhHQvR5RMT6wOKSRgCb4brq61T2S3pH0n8a6DRorc8kErlbZSAk6WjMSNwedsKdKXAZwY3xoG0W4NiIWEbSg7kjBOG8ysXxgOORiDirMkBLToPpJB2LifIapdMgHOlygaQnJF0gaQE8G3ItDqF8pIFOg9b6XJj0eREYFy5hWcturwicIen72BG6b0T8JSKGpd/YFqdmlerw4479sxGxb/rtCiHWH3F01Q9KXFc/4CVsu2fF4enTAqMxOVddHDvhGuDzS9pP0i3AuTglaAbgkEhkXzVs9560vLv34ii1WyJi4ew0+BrV7NsxOBXwmWpOg4QZgdciYv50T87Dg8+ngeMjYq6+7jRI2B1YJSJmSYO1fXAawED8PVmiwfosBLwcjlRdDUcaXYIdPj/ETqOy2Bu/6/0jYihwBHbWrwxcExELSHoofyu7BLtgJyo4hexD4PLoAOFkG7b7IszldTwwPCJmL/nufgo8FhErpX7eA8Bl2PbuEhFD5FKsE2rIaevaLuvItWV0DbLjoJcgWohzKpEGYzAD8mBg8YgYFQ1krVcLIdalmHxqJewpHI1ni9aoQ9Yn2EGwMvbEF2fTJpEIv0pgWt
KgMCLWAZ6Q9BNJe2OOhJqGKCKmTZEJxwBb4dyrsUnPfyX9vldSn96OlSjHytwodAVL9HGYcAicq7elpOtShMK9uLOdYQzAIZP7AEviaJx3IjHW4wHEio0cQKiFlXmbVtt/hVmZ12nrvAbo02GWaEkX49Kmlf83ws6aMyPiBpya1W5KQRvyJuHSXT8KE2IN0+SEWOuXkZEGjMcB80h6ENftfgQ7k+uNMHkXmBgRe6T1WXAI7JF4BquqszZ9H8+VdHLS7zVJm+EojW2ihZiyr6OafbuPkt9aSc/iZ/CoMIv6OcDDks7A0YKNHhD3SEjaExiFB2pP4dSQi9P7/xAN/p7IhIzP4Xzyg4HfSLpT0lnYebhpHeIOxf29B9O5oyWdLmkrPMGSB35dhxMknQj+pkgaifkj9o+IuesRVMV2P4qjDg4qKecjnN52QkTsEhHTJ90uw89EZaKuVvRal11bRhdBPYBoIS9du+CyYuvhzt/huIN1foN+uz+OeNieVJsVOwmewYPqaeqU9QsKpEA4YuEWXApsDeyBrJtcCc/yzVNY3x64ssY5AzHHwhKFbfPiAegJOKTrCWoQvfTlBc9aXA+s3mxdukofWlK+bgZ+2uxr6kkLDp2Pwvr3cKfxM+DZJum0AE5VeqZoO7Bjc1wT9FkwPTtPF21HGX2KbdvGvrlwasG2JfWYGjsdfpDWB+MB/u3UQYhVkFchejsSO2xfxXnTixVtaB3t9N2ky0s493ZE2n4QsH+dsirkXCslmSfWas++utRr3wrHz43zko8DNivsvwc7JJp+bU1u136F/wcCe2Fna6X9bmnk96Twu7Pi1KQN0nu2SuH+b9MBuUsDv0r/V2zCeOAnzb4HvXEp2Lb5cKTPmcX7W1JGl9nu9N34P1xFZwQmzHwE2K4Z15aXLnjGmq1AXjp5A/3BOQKzl1bYar+Fywy+S4NZonFkwPV4Fu3YtG0mnNtWYdYfUFLWATjc6Spg98L2WTE54uXAL+rU72u24sK2wOWmqjogUgfoQxxKWGTlXg64EDOO79fsZ6LZC21XHZg6tXNdrMxdpM9A7NiZrvDhmaqgV6f1wSG+DasM0dMX7JibLf0fxb/p/09oIJN2eiaH44HiwLRtD5rEylyw21MXtu2KIwOuLqtPtQ4TZtS+uw6dRmOHyuPA4WnbrDh8+QgcubBVB651KB6c79OBc/vhWe4KK/cAHC5drNTwHIXKPR34jRVxNEJDnsUpcekq+4bD1+9o9vX0pKVoAwrbdqZGZacG6bYPdhreQY2JlRpyilUVtgfGN/va+sKCSS47bNs6Y7sLMqbB1cu2w86wm4BTSp5b7fvWqWvLS8eXTI44hSPMyDuHpBFFwpKU/xOSno0GsUSnPNSbJC2d1sfTQqwzAXMd1KygUJA3L65DPh6Hx06FmWE/kPTbstcVbTCOh2vJT0oEQKNw7ma7IfQVYi3Mh7AzMEjSQa2OGYCrRPTplyqqVB0oHFOKlbmL9Pktji75aYEEcTpJn5XVp3BetL6/ETEXDn2/TdJj3XYhUwgi4jgcGbQiDm++BRNHvpn2/wjPav+igTqdhNMlFsIznqMq9z8idsCkrZ/K3CeN0Kdot1szlv8Up9K0yxLd1nPYxjFDgC9UolpJG6RoV2Bb+wkuL3thSUKsynvST055qGxfXKlqRj2keGFW7kVxqOxj+BtwvcxzUOFvGCaHr9aSNdnvRkTgb+RX9ejUG9HefUv7usy+JZ6hj1Wj+lFfQaEf8vX7HBEzYUb7+5v1PSk8DwsC8+PKKq9K+ncd537jnYqI2XBkxfWSHu0W5TMqto2O9EXbsJMdst1t6ZSei0GYZLHDcqBj15bRNciOgykYYTbtsTiUbFJE7IiJAt/HIfMXFzukDdBnOeAkTGayBCbT2gCHzG6Ba7XfVFJWhbPheFz66zRcmeEYHNEwSlVKgRXklGEcnx57xNsl6YuIs3GZxdHJKXM+nq3cQ9I7bXW4+iKiRNWBButTSW1ZQ9LElF+/Ch4oPg1crnJM89Nip1BxMNSuM6GvIiIWBf4iaUjqIIzBBImXAGMkfRYuWSlJnzZIpyXxbNmQ1CkfgyOHRjbjna1ityfgwcIlJWQcj0kMTys+e5WOXdRZJSAi1gL2w6VuV8SOgx/jWZ0f4OixB2rImApHmkxQSwnO/pjMdlK9NjImZ+UeiGetWrNyz5jkt/kspd9fAHijoFPRYdDn7XY7960ffke/6qx9KzyTfb48MUBEDMbP8MNKZJ5RKFXbDCdWOw7xqTryjiS7P6Gtb2X6P1cw6UZ05BvQjp3sB+Y8KPsMhCtK/U2FKgmt5JSyAemb+JQK1cla2aT8DDURmRBoysZWOHxzgYhYHYcTXgK8gAfqyzVSGUlPAw+kZTdcT/uJ5Cx4CvhJHbImJUN1GA4tfhMYhEPKn8EOhTKoxTi+HTC4htNgIHCVpNFJt+dw5ME7tBC89OnOZwFVWbmTY6FhSDMkTwLzhpmdN8dpLs/j3LuqBE0RsWREnIv5Qg4Ll5AsOg1mB06PxOqewVDgxfRhfx+Hur6Oyesui4gZJH3SKKdBwuaYgR9JE3FqyjCgci83CUc3NQrt2e1nge0iYsVqJycH7TY4fH+R4r7UqRoEXJmcXaWg9knRzsT5qD+qodPymP/laODeiNg/yf1v6jAOBk6r8z0psnJ/oW+yci8r6eMqToPlcNrHwZgocuqkU6XzWdFpQB069SpUuW+TUhvNQQn7FhHzRMTSaQAyGZKc2YEr6nkmeyMiYgXgGuCXuD36g9uo0N71vied0WdwuITpNxxD9b4jEbFMRFyK06wObCVL6bcO7jLl+zBKvm9Xpr5rLVnt2clJ9dju1Kf+LbbbRX0qcubAz3xVncJE6rvj0rut5XyV+mClri2jm6AekC+Rl44vOC/2BRxSumlh+5HAAQ3Uo5i/PAgPGscDP0zb6ibWoSUf/UA8w/9PHL0wAJi3DjlzF/6fDTsjHsaDidcp5N+VuU5a8uM3pqWKxWRt0FeXdO8r9+1nlfuf1k8iEZo1WKedsTPjHGDHwvZDgaNrnHsLJojbCjuedm61f0ZgaLPbvacsmEfiLExquhVwKYmfBDts5muCToNxGc6iTbkYOxWXxjn9DX13O2O38cBjJC5j+TwwvI1jFqtDl06ToqX3ZO9kn7+b7OvzJN6YJHv5DrTTHjjHdhdg+sL2Y3DUWbVzrwf2xZwI5+B0t71w9BE4/aFunXrT0lX3DTu9nsfEzHMzeX+g8r2smxCzty3pmazYw1OBn2Nn2IZp24zAtxqoz3O4D/TjwrbivZuh7PcN96cOxJFKf8DO4o1oIcleDPhOs+9Bb1jw5FmXvG9dZSeTnB3S/0tiXrNjK3Iqz0AJOeeRuMtwdOgoPPm0RT1y8tKNz1+zFchLB2/c5GQz02DikWlp6QTeCGzdZB1/nj4m9+O0ibLntSYvnCt9iHav8/e7jHG8ipz1cGRF05+JnroUnsmmVR1IH9iL0wd347Tt9mrvCA7Rvq+wviZ2hs2R1geTiPbyMlmHZWMcoTEGOKKw/25ggybpN
nX6W3Ec7IUdWdd11gZ0pI3S/3XbbVwObXxh/WDgXBw1NZn8TupZmhQtdSyvBlZqtX07XHHmu53UpW5WbmBtzLdTWX8TO59H4ei3H3dGp96w4EHhVZ29b8CQ9B5tjp1af0r3bFDav2jl/7684Ain64GZ0/or2AG2F047GtlgfVZN37MR6Z0YR8EJiUPXSxEG4+pERbv0KnbWH43J8FZtdvv3lgWnbI3DEbydet9w+eGbC+sdspPAasBrwOqY0PaGZK+PIpXxLClnKux8OjKtP5qezx0x4fvxzW7/vGRyxCkebeUeRcTuuNTNWk1Sq6LHrMD0mFvgn3Jd12rHt5tLGRHzqYVcrdM55RGxCbCvpDXK6NQ677CSJ4tZ+fursaHXPRI17t9O2GmwToPVqvz+jHgGZHU8yzw9zufbrco5w/AH8UwASV9GxNWYw+HPEXEJcLIyIeI3EK7Z/GlhfS/gZ5JWaaJaXyMiFsEpLI9IWrsJv98hu53SGCZJejzlfM6DHSCfATup44RTU0v6X8HeLUhJUrRkC3fAUWZ7KeVtF65pNklHd0SvJGMaPBu2PHZGf4X5ZkZWOWdNTML3WERsAGwkafe0b1tgYUm/6ahOvQHpHq+LbdxuHb1vEbEu8Jmk+9P6LsCWOArhLMx1dKikcV1/FVMOImJZHII9B/AFsJSkldK+YTgybi81iAcihY4Pl3RjWj8MVzwYhwdvVwHHSLq7hKzhuFrW/RGxJbCJTEY8A7A/8JGk33XXtfQlRMQsuGJV5X3bHTvt6n7fwnxdA9L35Pv4vu2a9pW2k2FuoxF4Mm5NbA82Tft2wBHCR5W8vkVxpNmduDzx3mn7YPyt203S52VkZXQPMsfBFIw0mG3d+RwIfIBnjBqhwyrJQTCZXqlj/CHwtqTnajkNEo6LiL1SR7Qor5+kN4s5qiX0isq57RzyEu7s1sKA1KmuVKvoVxwgS/oyOw2MKk6DuXDI66hG6VK8/+k9+VjSbZgcc0vcSRtZ5fwfAEg6VSYLqnTmbgIWiYjNgFmy02ByRMTU6f34tHAPBuDqKrs3SadvfOckvYzzMQ/85hndj47Y7dRB/FzS4xUZkt7AnazZgd+FWctLIeUcr5FynCtEU1Ol9+U14AFJD9dwGozCM9TXYyfqa6mjWEE/zCdRNwq29st0zX+UtD52Huxb5bz9sEPl+bTpTiZ/12fC5L19FhGxL3AKnq38EN+3PQqHlLpvEbEPTiN5sLJN0lk4Tek1HLHyYXYaxL64QtSv8Wzs2Xi2v4KVcBpXo5wG6+GonNmSU500sFsDT4hMxAPKMk6D9YCFgfmTHbsBPxPIxMMTsOMvo5NIg+rFgQnhqjlIGoPTQl+njvctyRoAfBYRS8tcZEUbUMpOJjnTY0eTMDH7yMIh0+PUhVpyFgtz/gzEzrVDgBUiYu30jG6M35HsNGgycsTBFIg0EHu/+JGJDrJpd1KPYbjawVpqo3pDmMTkbFz7u+rLHiZouQl/TI+U9I9W+wfhEKotizMjVeQNxl7utliia7ZR8sQegjvzswO3Sjq/lT67Aic26mPfU5EGD69IuqI4k1qYvWx41YHUgflf5T6HSYQq5DpVGYLDzPvPY/LL+3Gt4CfSvjlwysUAPDv8bPdeSc9HuCrJQsDfC+/bVCQeumhSqbueYifT77bFNl1kUq+qT7Kl9wDrS3q1cP4McrWQIcARwK6S3iuhzwrA6Zg3ZjocSlpspzmBw3H97jZL3qZjHsT58K+kbTviNJVbcSj2BjjS5MlaOhXkVrtvVVm5k04PA2tKejV1OGfDKSovp3f7Lpzm8FRZnXoTUhvdi52hTwJb47D10/Ag72k8qKx635Kc+/Ds+cvALyW9XrxHEfExsHrFfvZFFNrpv5hPZR/gPdyfWQWnYY7AOdzd/kxGxNw4JeoBYEHcz3kBR5j8Mx3zEc5Nr/reFmTdj50Hh+CB42C52tQswG049720Dcj4JsJVZi7DZXLfwtwGD+AqRhX7W+p9S7IuB/6VZM2LnQ7XSXqj0M+paicLct7BNmAQTlO5VNJLqZ88HnNbVbMl82A+pIqcGbDDYQk8yTcBR5ztKemZateW0f3IjoMpCGkw+xvMWjoVHrQ+UxigzY6JRUoNrrtAn+txtYHzwyXGFsYe65sqnuqIWEzl6ohfg2eGpscdma0l/bXVMTVlFTrQL+MQzEsknV7YPxjnBFd1ZkTEndhz/jCuCLAn9qYeIunhiFgCf2gfbE9GX0CYlfs2nOO8d+s2TR+OI/EApNtLg5Zw+MyOZ0PadPgU3qW9MVHVO9jTfT+egdsTf+D+nSIX+jTS/T8F50Z+qBRWWNg/GBMlNszBVtJOVmxAI+xkLQfrHJiUql27HRHnYcfDieHKJFvhChaP4M7eU2nW6Pm2zm9D3nXAjZLGRMSpmHxqIzyrPy4NuBeu0Wn8BSY72z4ivoP5Fv6Oc7mnB44DJtahU6e/b610Wgnbgk8xD8Pt2Ekyq6T9yujUGxER5wOPSTo9Is4A/iHp5LRvPRyBMFHSi3XIOQ14WSkcPTm1lsYEm+d15/X0dLRqp9/j9h6d9u2OZ1gfVYnZ/S7SZwxO0zsuHDGyDiZp/QhP3nwArCzppE7KGo8JaGeRtEv3XE3fQUScjtv6hPTdPRlXHngNc2UsDqwm6dxOyjoR824MllQ1SrSVnKE4leAV3B84Fn/755f0qzrkrJDkPI4dG3dj50ZUHFsZzUVOVZiyMBrP8IzBXrhKOHUlbP49zDTdiM7warjc49/TbP5oTNw1CYfMbpp0KuM02ATPnJ0i6RjgImC3NOgozszVlIUN4J14sHoGcHxEPBoOgULSu8B+NZwGc+Lc3j9IukvSXzD5z9XA3hGxiKQX+7rTIOEI3OYC7k8OpK9TBdL2sxrhNEgYg2c8rsDe+Z9FxK0RsXLaPxtwW3uD2Mq7hGeI1sEEQYfgcnSrAsPkHPRtu+0Kpiwcj4nRjgEGRsT3I+K4cI45ONzx9kY5DRLK2MkDGmEnE44EzpM5MpZMbXRsRKyR9Pk3tkntDYbnwrZ1zrTpeBxKfAV+nndMcsoO0OfDaQVj06ZKWdL7gXMjYqSc2lNr9vNGWlJ4tsJEXafjmdRXgKfL6pTwO2rft/1r3LeiTlvjZ/OgJHsh7OQ6oA6dehXCYcXLknhb8DO0Q0QcCyDpVkmPlnAatJZzDbBzRByZ5HwlR2Od37aEvoE22ukqYKdCe4+RNLqBToOpcbTDjGnTTnim+TTgMcx5cU9Jp0EtWatiZ+IebQrIKI3Ux/4ARzqSbPMzwIvAUngm/gXsWO2srO0lnYjvXT1ynsDRSv9IcnaR0ygOq1PO47jfNQE7nlaW9FZ2GvQgqAcwNOal9oIHMXcV1r+Fw5SWSuvzk9h6G6TPopgx99e4pNMVhX07AIfVIet7tJTs6Zeu5TLc6ainVOI8uONYLE1zNDZcl1NHKTg8ID6z1baZcV70nmm9T5dfBH4K3FFYPw44vLDe6BJ3c6ZncVBh20CcW38xsEid8nbEg5eBOIJlJI5CWa/Zbd8TFmAR4JbC+kvY
WXconi3Yogk6rQvcXVhvtp1cHc/irEbn2KYXxPno7wP3FrbPgsnM6n22f4+rJlyECSIr24ele9i/xvmR3os/4EHCOGCmwv47MbFVWX3WAu7szH0rodM9wPca/Uz2pAU7jOautFf6uyzwF1KVijJ2u4achlUp6elLmfZugk5LYef6jUxeNagfTmH5dhfJuh8P+pp+H3rDgkP1rwQuxP3ux9L2hXDE2tRdJOs8PKlcxg50t5yFcbpz1e9RXhq75IiDKQdvY7ZUwvmwT9LiIQbP0izfCEUiYiFMdvcn7F18AYeYVlCKDCXJWhCHkn4UETOohfBrdzybVprwS9JbuJ1Oj4g5ImI7HLp1FO5wr1pCn+UjYh3c8Z0nIp6NiBFJ/kc4BGvFtN7X83wmkrzSyWt8FbB+ClttePtIqnAS/Law7QvsNHibNINZiIaohfvSOfcB10o6BbNe39qFak+xkAkGv4iIyyPiXOADSbvJTOzHACvW0dZdhZeAC8MErf2baScTXsSD8/Vw5/pzSbtIOgwP3oeUESLpNUlb4MiXQwu7huOUqZfLyImIX6aQ0iNwmsPZ2NlWQSmSNhlfSNoev29LAOOT/dwclym9rYxOFZHA2M7ctxI69Zd0Zx069TpI+p+kf1XWw3wvz+L0q10jYngZu11Dzm5hlv0+jzLt3QSdXpA0FPMqPJIixObC/aNpJD3URbKmlvRwV+vfVyFHBhyF7fbHOMIDHPk7VHVw9tSQtbwcMVTGDnS3nCF4UrFP84j1NGSOgykAaaA+Ox5gv6oUZh9mfv8unt3bVtJGDdBlblyf9yvgXeBCSdemzt5/wxUW7kz6lCHW+TPwP3xtF0u6JFqIsJbHBF27yikG1WQtDMyFPZxHYsfGfZg4ZnxEHIRzW9tlUU/6XI0HmbPicPRFcXjrREz4tS2wuRKzeV9FuLTUTNjIv6AWEsIFgVMxsc45jTL4EbExzs19CIeFrgKMVsr3S/mXQyVtV6fczfA930IONW840WNPRERshCslPAfsnf6uLOmgtH8PnNO/eQN1qpRw+pqkMW3/IbaTj9EgO9lKn1dxxNi6OIrpubR/Tzy7t3UH5U+DoxZGyYzYtY6fE+eLbqhEPhsmEf0jzo+9FEeL1UXSllLJ5sXvyfr4mm+XdFfJ8/fDjOA3S5pY2L4Rvm9/pc771lmd+iLC5fgekDS+5sENkNPb0RPaKcxBdCCwIe73nCTp5mbLyiiHMNnrvcCBSmU1my2rp8nJ6Hpkx0EPR6uB+kfAZZIuTvvmxAQ0M+POYLezjUbEH3DH/NjUIf8dsIES/0CYVG72NKNWj6wNk6yNJf2tcEwZQsS5cfQDuAN6Dv4g/yftnw2H022qVoSLreSch6sDHB0Rv8bhsRNxVMX82Ii9XU1GX0Bq77/g6ItFgW1kcrbp5TJ8P8S1mzdVgUW+G/WZCQ9c38WzumdjErSLMVHTXTitokMOn4iYWdJH0WAm/p6KQnu/h8PBL5T0t4i4FoeKX4OdCVvUch52oU6VqgPfl9nzAzsR++Nw4ftwWP+GakAljII+GxQG6bPiifEJETEzHsyO6EgbpeubD9vL35c8p0iwuCywBX5/H8AzOy8Cj3dmVj7qrKARLVUQvifplXDd97mwM/lNzHkwmE7ct3p16mvoKmdodqqWQ09rp2TPZ8XjgVd7iqyM2kiRnptKuqKnyOppcjK6Htlx0MNRa3AdEWOBjyXt1gBd5sWEWtvK6QSksPS3ZFbduXFu6u2qQYZXQtZCwBKSbqkipiKr2EYb4zDVHygRPKWB7PySzqgiYz7Mg7CepM8i4gU8A/ccnr3+RNLhtXTpC2jlYDkE518HdrCMS06EFSU92gBdWldBeB+nF9yMn8VHMD/B+5Ie6259ejvaaO/3MHHoOGBN/AyA811vaKBe7VUdeBRHEY3C5Tl3al9Kt+mzTNJnCJ49vwFzHsxfLQKqi/WZC9uyP0jaL1zF5m5MSjUEp5kc2whdWunVXhWEWXA7fRf4QtKOjdYtIyMjIyMjY3JkjoMejDS4XgjnyCJpHM5H3SztnwMPvvdphD4yq+meOCS8ggtoydM9G5i3ltOgpKxT8LVXRRttdC0uKfTjtH8ezLJ9dg193sSzg5+FS5FdKOk3kv6Mc6++HRGL1dKntyM5WJbCzPUA2+Ba8OPwzODOAI1wGqTfaV0F4THMu7AdriH/mswUnp0GXYA22vtx3N474MHwzZIObrDToFrVgVlxadcRmDelGfocixmir8K8LT+VS8RWZa3uSkh6G1gBmD8i3sdRYaMlXYD5KNZI6V6NRntVEEZjnpxdgF2boFdGRkZGRkZGK2THQQ9GicH1+cA8alxZMYBnJX0CEBH9cd3ufhFxNDBA9dVsriZrOklVB/tQqo3OxDN7k0rImpj+fizpuMKu4bgWcZlykL0aVRwsV2OCzEWb4WBJjoqxmAvkRTxAOxkTdW3QaH16O6q0936Nbu8qg+Kx2Ok3PCIWKOPQbJA+QyNiwTI2qYv1ao9gcRi2b680Up+UbvEu0D8iHsM8C3+W9IZMrDgcGK5MjJWRkZGRkdEjkFMVejiK+XBpcD0tnj1/CZORrd9M/QAiYjQuVbd2Z/JjOyqrO9soIqbDOcAHqgQBWV9GuBrFMZJWadLvLwXsBXwb16IeGRGLVnLMM7oWPbG9I2I1zOZ9Z1pv9jPZo/RpjaiTYLEb9fg5djx+gBm1lwD27yntlJGRkZGRkWHSqIwejCKJTpp5+W9EvIXDXNdummKT42xcZuzOZsjqrjZK5CwLAOdlp0F1JAfLaMyq3BRIeiEixgPzAAekzaXK1GXUj57Y3pLurfyfBsWnYH6DrE8rpBn/OXGlh2bbt0sx58K2wGmYOLJptiQjIyMjIyPjm8gRB1Mg0kzfNpIOabYuFXQlc3VXyOrKNuppLMg9DcnBsjiwvqRTe4A+MytXQWgYemJ7d6TqQF/SpycjV0HIyMjIyMjomciOgykUuXNVG7mNGovsYMnIyMjIyMjIyMjonciOg4yMjIyMjIyMjIyMjIyMjHaRqypkZGRkZGRkZGRkZGRkZGS0i+w4yMjIyMjIyMjIyMjIyMjIaBfZcZCRkZGRkZGRkZGRkZGRkdEusuMgIyMjIyMjIyMjIyMjIyOjXWTHQUZGRkZGRkZGRkZGRkZGRrv4/9LPz16B2aW/AAAAAElFTkSuQmCC\n", - "text/plain": [ - "
" - ] - }, - "metadata": { - "needs_background": "light" - }, - "output_type": "display_data" - }, - { - "data": { - "image/png": "iVBORw0KGgoAAAANSUhEUgAABA4AAAHRCAYAAADqjvsdAAAAOXRFWHRTb2Z0d2FyZQBNYXRwbG90bGliIHZlcnNpb24zLjMuMywgaHR0cHM6Ly9tYXRwbG90bGliLm9yZy/Il7ecAAAACXBIWXMAAAsTAAALEwEAmpwYAACnwUlEQVR4nOzdd5hkRdnG4d+7y5JzzlGiShAkg0gSBSSIJEFUEBGQjBJlySgCkpNkBCSJRMlIFImCiqgofoBIUkRAReX9/niqmbPjnOnTM70zs8tzX9dcM326u6a6+oSqt8KJzMTMzMzMzMzMrC+jhjsDZmZmZmZmZjZyOXBgZmZmZmZmZrUcODAzMzMzMzOzWg4cmJmZmZmZmVktBw7MzMzMzMzMrJYDB2ZmZmZmZmZWy4EDMzMbcSLilxGxxnDnY3yLiCMi4tWI+PMw5mGNiHh+uP7/QEXEohHxeET8PSJ2G+78DLWIOD8ijih/rxYRTw93nszMbOLlwIGZmQ2piHg2Itbute0LEXFv63FmfjAz72qTzvwRkRExyXjK6ngVEfMCewNLZObsw52fkaB8nx9o+PKvA3dm5jSZedIg/+/YiLh4MGkMVl/HRVOZeU9mLtqNtMzMzPriwIGZmVkfhiAgMS/wWma+3OkbJ9RgSZfNB/xyuDMxWP4uzcxsQuDAgZmZjTjVHtOIWD4iHo6INyLipYg4vrzs7vL79Yh4MyJWiohREXFQRPwxIl6OiAsjYrpKup8vz70WEQf3+j9jI+LKiLg4It4AvlD+9wMR8XpEvBgRp0TEpJX0MiJ2jojfliHzh0fEQhFxf8nv5dXXV963NnArMGfJ+/ll+6fLNI3XI+KuiFi8V5l8IyKeAN7qq8EZER+MiFsj4i+lrA4o2yeLiO9GxJ/Kz3cjYrKash+n17/XkPg1IuL5iPh6Kd8XI2LjiPhURPym/N8DKu8dW8rgwlI+v4yI5fr98hu8NyLuAD4OnFLKb5GIWD8iHivl/lxEjK2k1Rqdsl1E/F9oesiB5bn1gAOALUpaP6/Jz7MRsW9EPBERb0XEORExW0TcVPJ3W0TMUHl9J9/lpSiQdF3Jw9fL666IiD9HxN8i4u6I+GBN3t6bbhIRF/VOKyJuiIiv9XrPExGxSZPvwszMzIEDMzMb6U4ETszMaYGFgMvL9tXL7+kzc+rMfAD4Qvn5OLAgMDVwCkBELAGcBnwOmAOYDpir1//aCLgSmB74PvBfYE9gZmAlYC1g517v+QSwLLAiGj5/FrANMA/wIWCr3h8oM28DPgn8qeT9CxGxCHApsAcwC3AjavxVAw9bAeuXz/yfapoRMQ1wG/BjYE7gA8Dt5ekDS/6WBpYClgcO6p2vhmYHJkdl903g7PJ5lwVWAw6OiAUqr/80cBkq02sp30dDfb43M9cE7gF2LeX3G+At4PPltesDX42IjXultyqwKPoevxkRi2fmj4GjgB+UtJbqJz+fAdYBFgE2BG5CQYdZUJ1qN4ABfJdbAf8HbFjy8O3ympuAhYFZgUfRPtmvzNy2j7QuQN8RJX9Loe/vhnbpmZmZgQMHZmY2PK4pPbGvR8TrqEFf59/AByJi5sx8MzN/2s9rPwccn5m/z8w3gf2BLUvv/GbAdZl5b2a+gxq92ev9D2TmNZn5bmb+IzMfycyfZuZ/MvNZ4EzgY73e8+3MfCMzfwn8Aril/P+/oYbfMo1KBLYAbsjMWzPz38B3gCmAlSuvOSkzn8vMf/Tx/g2AP2fmcZn5z8z8e2Y+WCmXwzLz5cx8BTgU2LZhvnr7N3BkyeNlKKhyYvl/vwR+hYITLfdm5o2Z+V/gol7PtdP4vZl5V2Y+Wb67J1DDvfd3dWj5Xn8O/LzDvACcnJkvZeYLKHDxYGY+lpn/BH5Iz3c92O+y9ZnOLeX6L2AssFRURtB04FpgkYhYuDzeFgVK3hlAWmZm9j7kwIGZmQ2HjTNz+tYP/9uLX7U96uH9dUQ8FBEb9PPaOYE/Vh7/EZgEmK0891zricx8G3it1/ufqz4oQ+CvL8PF30A90zP3es9Llb//0cfjqfvJb23eM/Pdkp/qqIjner+pYh7gmSZpl7/nbJiv3l4rDXnQ54P+P3P1jhFvA5P3Nc2iRuP3RsQKEXFnRLwSEX8DduJ/v6ve6TX9blqafteD/S6JiNERcUxEPFP2vWfLU70/U1slsPEDYJuIGIVGO1zUaTpmZvb+5cCBmZmNaJn52zKUe1bgW8CVETEV/ztaAOBPaNG8lnmB/6AG3ovA3K0nImIKYKbe/67X49OBXwMLl6kSBwAx8E/Tr3HyHhGBggEv9JO/qufQ9Iy2aaNy+VPNa98Gpqw8nlDu+HAJ6lmfJzOnA86g+XfVX7kOxEC+y96Pt0ZTZ9ZG02rmbyXX4P/39XkuQCNP1gLeLlN7zMzMGnHgwMzMRrSI2CYiZim9tq+Xze8Cr5Tf1cbypcCeEbFARExNz9z1/6C1CzaMiJXLXPOxtG+ETQO8AbwZEYsBX+3Sx+rL5cD6EbFWRIxBt2r8F3B/w/dfD8wREXuEFkOcJiJWKM9dChwUEbNExMxomkbd7QcfB7YuPd7r8b/D/UeqaYC/ZOY/I2J51PBu6iVg/tIb3w0D+S5fYtx9eZryntdQIOeoDv5/77QogYJ3gePwaAMzM+uQAwdmZjbSrQf8MiLeRAslblnmqb8NHAncV9ZKWBE4FzWK7gb+APwT+BpAmX//NTQv/0XgTeBl1Dirsw9qgP4dLQL4g+5/PMnMp9ECdicDr6LF9zZsOg89M/+OFu7bEA3J/y1aJBLgCOBh4AngSbTQ3hE1Se1e0ngd9VBf0/GHGR47A4dFxN9RYOTyNq+vuqL8fi0iHh1sRgb4XR6NgjuvR8Q+wIVousMLaN2I/tb2aJdWy4XAh6kPGpmZmfUpMrs9Os/MzGzkKyMSXkfTEP4wzNkxG+8i4vPAjpm56nDnxczMJiwecWBmZu8bEbFhRExZ1kj4Dup9f3Z4c2U2/kXElGhUxlnDnRczM5vwOHBgZmbvJxuhhev+BCyMpj146J1N1CLiE2hNkJfQIpJmZmYd8VQFMzMzMzMzM6vlEQdmZmZmZmZmVsuBAzMzMzMzMzOrNclQ/rOZZ545559//qH8l2ZmZmZmZmbWxiOPPPJqZs7S13NDGjiYf/75efjhh4fyX5qZmZmZmZlZGxHxx7rnPFXBzMzMzMzMzGo5cGBmZmZmZmZmtRw4MDMzMzMzM7NaDhyYmZmZmZmZWS0HDszMzMzMzMyslgMHZmZmZmZmZlbLgQMzMzMzMzMzq+XAgZmZmZmZmZnVcuDAzMzMzMzMzGo5cGBmZmZmZmZmtRw4MDMzMzMzM7NaDhyYmZmZmZmZWS0HDszMzMzMzMyslgMHZmZmZmZmZlZrkuHOwPvK2OkG+L6/dTcfZmZmZmZm
Zg15xIGZmZmZmZmZ1XLgwMzMzMzMzMxqOXBgZmZmZmZmZrUcODAzMzMzMzOzWg4cmJmZmZmZmVktBw7MzMzMzMzMrJYDB2ZmZmZmZmZWy4EDMzMzMzMzM6vlwIGZmZmZmZmZ1XLgwMzMzMzMzMxqOXBgZmZmZmZmZrUcODAzMzMzMzOzWg4cmJmZmZmZmVktBw7MzMzMzMzMrJYDB2ZmZmZmZmZWy4EDMzMzMzMzM6vlwIGZmZmZmZmZ1XLgwMzMzMzMzMxqOXBgZmZmZmZmZrUcODAzMzMzMzOzWg4cmJmZmZmZmVktBw7MzMzMzMzMrJYDB2ZmZmZmZmZWy4EDMzMzMzMzM6vlwIGZmZmZmZmZ1XLgwMzMzMzMzMxqOXBgZmZmZmZmZrUcODAzMzMzMzOzWg4cmJmZmZmZmVmtxoGDiBgdEY9FxPXl8QIR8WBE/C4ifhARk46/bJqZmZmZmZnZcOhkxMHuwFOVx98CTsjMDwB/BbbvZsbMzMzMzMzMbPg1ChxExNzA+sD3yuMA1gSuLC+5ANh4POTPzMzMzMzMzIZR0xEH3wW+DrxbHs8EvJ6Z/ymPnwfm6m7WzMzMzMzMzGy4tQ0cRMQGwMuZ+chA/kFE7BgRD0fEw6+88spAkjAzMzMzMzOzYdJkxMEqwKcj4lngMjRF4URg+oiYpLxmbuCFvt6cmWdl5nKZudwss8zShSybmZmZmZmZ2VBpGzjIzP0zc+7MnB/YErgjMz8H3AlsVl62HfCj8ZZLMzMzMzMzMxsWndxVobdvAHtFxO/QmgfndCdLZmZmZmZmZjZSTNL+JT0y8y7grvL374Hlu58lMzMzMzMzMxspBjPiwMzMzMzMzMwmcg4cmJmZmZmZmVktBw7MzMzMzMzMrJYDB2ZmZmZmZmZWy4EDMzMzMzMzM6vlwIGZmZmZmZmZ1XLgwMzMzMzMzMxqOXBgZmZmZmZmZrUcODAzMzMzMzOzWg4cmJmZmZmZmVktBw7MzMzMzMzMrJYDB2ZmZmZmZmZWy4EDMzMzMzMzM6vlwIGZmZmZmZmZ1XLgwMzMzMzMzMxqOXBgZmZmZmZmZrUcODAzMzMzMzOzWg4cmJmZmZmZmVktBw7MzMzMzMzMrJYDB2ZmZmZmZmZWy4EDMzMzMzMzM6vlwIGZmZmZmZmZ1XLgwMzMzMzMzMxqOXBgZmZmZmZmZrUcODAzMzMzMzOzWg4cmJmZmZmZmVktBw7MzMzMzMzMrJYDB2ZmZmZmZmZWy4EDMzMzMzMzM6vlwIGZmZmZmZmZ1XLgwMzMzMzMzMxqOXBgZmZmZmZmZrUcODAzMzMzMzOzWg4cmJmZmZmZmVktBw7MzMzMzMzMrJYDB2ZmZmZmZmZWy4EDMzMzMzMzM6vlwIGZmZmZmZmZ1XLgwMzMzMzMzMxqOXBgZmZmZmZmZrUcODAzMzMzMzOzWg4cmJmZmZmZmVktBw7MzMzMzMzMrJYDB2ZmZmZmZmZWy4EDMzMzMzMzM6vlwIGZmZmZmZmZ1XLgwMzMzMzMzMxqOXBgZmZmZmZmZrUcODAzMzMzMzOzWg4cmJmZmZmZmVktBw7MzMzMzMzMrJYDB2ZmZmZmZmZWy4EDMzMzMzMzM6vlwIGZmZmZmZmZ1XLgwMzMzMzMzMxqOXBgZmZmZmZmZrUcODAzMzMzMzOzWg4cmJmZmZmZmVktBw7MzMzMzMzMrJYDB2ZmZmZmZmZWq23gICImj4ifRcTPI+KXEXFo2b5ARDwYEb+LiB9ExKTjP7tmZmZmZmZmNpSajDj4F7BmZi4FLA2sFxErAt8CTsjMDwB/BbYfb7k0MzMzMzMzs2HRNnCQ8mZ5OKb8JLAmcGXZfgGw8fjIoJmZmZmZmZkNn0ZrHETE6Ih4HHgZuBV4Bng9M/9TXvI8MNd4yaGZmZmZmZmZDZtGgYPM/G9mLg3MDSwPLNb0H0TEjhHxcEQ8/Morrwwsl2ZmZmZmZmY2LDq6q0Jmvg7cCawETB8Rk5Sn5gZeqHnPWZm5XGYuN8ssswwmr2ZmZmZmZmY2xJrcVWGWiJi+/D0FsA7wFAogbFZeth3wo/GURzMzMzMzMzMbJpO0fwlzABdExGgUaLg8M6+PiF8Bl0XEEcBjwDnjMZ9mZmZmZmZmNgzaBg4y8wlgmT62/x6td2BmZmZmZmZmE6mO1jgwMzMzMzMzs/cXBw7MzMzMzMzMrJYDB2ZmZmZmZmZWy4EDMzMzMzMzM6vlwIGZmZmZmZmZ1XLgwMzMzMzMzMxqOXBgZmZmZmZmZrUcODAzMzMzMzOzWg4cmJmZmZmZmVktBw7MzMzMzMzMrJYDB2ZmZmZmZmZWy4EDMzMzMzMzM6vlwIGZmZmZmZmZ1XLgwMzMzMzMzMxqOXBgZmZmZmZmZrUcODAzMzMzMzOzWg4cmJmZmZmZmVktBw7MzMzMzMzMrJYDB2ZmZmZmZmZWy4EDMzMzMzMzM6vlwIGZmZmZmZmZ1XLgwMzMzMzMzMxqOXBgZmZmZmZmZrUcODAzMzMzMzOzWg4cmJmZmZmZmVktBw7MzMzMzMzMrJYDB2ZmZmZmZmZWy4EDMzMzMzMzM6vlwIGZmZmZmZmZ1XLgwMzMzMzMzMxqOXBgZmZmZmZmZrUcODAzMzMzMzOzWg4cmJmZmZmZmVktBw7MzMzMzMzMrJYDB2ZmZmZmZmZWy4EDMzMzMzMzM6vlwIGZmZmZmZmZ1XLgwMzMzMzMzMxqOXBgZmZmZmZmZrUcODAzMzMzMzOzWg4cmJmZmZmZmVktBw7MzMzMzMzMrJYDB2ZmZmZmZmZWy4EDMzMzMzMzM6vlwIGZmZmZmZmZ1XLgwMzMzMzMzMxqOXBgZmZmZmZmZrUcODAzMzMzMzOzWg4cmJmZmZmZmVktBw7MzMzMzMzMrJYDB2ZmZmZmZmZWy4EDMzMzMzMzM6vlwIGZmZmZmZmZ1XLgwMzMzMzMzMxqOXBgZmZmZmZmZrUcODAzMzMzMzOzWg4cmJmZmZmZmVktBw7MzMzMzMzMrJYDB2ZmZmZmZmZWy4EDMzMzMzMzM6vVNnAQEfNExJ0R8auI+GVE7F62zxgRt0bEb8vvGcZ/ds3MzMzMzMxsKDUZcfAfYO/MXAJYEdglIpYA9gNuz8yFgdvLYzMzMzMzMzObiLQNHGTmi5n5aPn778BTwFzARsAF5WUXABuPpzyamZmZmZmZ2TDpaI2DiJgfWAZ4EJgtM18sT/0ZmK27WTMzMzMzMzOz4TZJ0xdGxNTAVcAemflGRLz3XGZmRGTN+3YEdgSYd955B5db66qnFlt8wO9d/NdPdTEnZmZmZmZmNlI1GnEQEWNQ0OD7mXl12fxSRMxRnp8DeLmv92bmWZm5XGYuN8sss3Qjz2ZmZmZmZmY2RJrcVSGAc4CnMvP4ylPXAtuVv7cDftT97JmZmZmZmZnZcGoyVWEVYFvgyYh4vGw7ADgGuDwitgf+CGw+XnJoZmZmZmZmZsOmbeAgM+8FoubptbqbHTMzMzMzMzMbSTq6q4KZmZmZmZmZvb84cGBmZmZmZmZmtRrfjtFGjg9f8OEBv/f
J7Z7sYk7MzMzMzMxsYucRB2ZmZmZmZmZWy4EDMzMzMzMzM6vlwIGZmZmZmZmZ1XLgwMzMzMzMzMxqOXBgZmZmZmZmZrUcODAzMzMzMzOzWg4cmJmZmZmZmVktBw7MzMzMzMzMrJYDB2ZmZmZmZmZWy4EDMzMzMzMzM6vlwIGZmZmZmZmZ1XLgwMzMzMzMzMxqOXBgZmZmZmZmZrUcODAzMzMzMzOzWg4cmJmZmZmZmVktBw7MzMzMzMzMrJYDB2ZmZmZmZmZWy4EDMzMzMzMzM6vlwIGZmZmZmZmZ1XLgwMzMzMzMzMxqTTLcGTAbH57f754Bv3fuY1brYk7MzMzMzMwmbB5xYGZmZmZmZma1HDgwMzMzMzMzs1oOHJiZmZmZmZlZLQcOzMzMzMzMzKyWAwdmZmZmZmZmVsuBAzMzMzMzMzOr5dsx2ohy3BYbDPi9e//g+i7mxMzMzMzMzMAjDszMzMzMzMysHw4cmJmZmZmZmVktBw7MzMzMzMzMrJYDB2ZmZmZmZmZWy4EDMzMzMzMzM6vlwIGZmZmZmZmZ1XLgwMzMzMzMzMxqOXBgZmZmZmZmZrUcODAzMzMzMzOzWg4cmJmZmZmZmVktBw7MzMzMzMzMrJYDB2ZmZmZmZmZWy4EDMzMzMzMzM6vlwIGZmZmZmZmZ1XLgwMzMzMzMzMxqOXBgZmZmZmZmZrUcODAzMzMzMzOzWg4cmJmZmZmZmVktBw7MzMzMzMzMrJYDB2ZmZmZmZmZWy4EDMzMzMzMzM6vlwIGZmZmZmZmZ1XLgwMzMzMzMzMxqOXBgZmZmZmZmZrUcODAzMzMzMzOzWg4cmJmZmZmZmVmtSYY7AzZxOHWnOwb0vl3OWLPLOTEzMzMzM7Nu8ogDMzMzMzMzM6vlwIGZmZmZmZmZ1WobOIiIcyPi5Yj4RWXbjBFxa0T8tvyeYfxm08zMzMzMzMyGQ5MRB+cD6/Xath9we2YuDNxeHpuZmZmZmZnZRKZt4CAz7wb+0mvzRsAF5e8LgI27my0zMzMzMzMzGwkGusbBbJn5Yvn7z8BsXcqPmZmZmZmZmY0gg14cMTMTyLrnI2LHiHg4Ih5+5ZVXBvvvzMzMzMzMzGwIDTRw8FJEzAFQfr9c98LMPCszl8vM5WaZZZYB/jszMzMzMzMzGw4DDRxcC2xX/t4O+FF3smNmZmZmZmZmI0mT2zFeCjwALBoRz0fE9sAxwDoR8Vtg7fLYzMzMzMzMzCYyk7R7QWZuVfPUWl3Oi5mZmZmZmZmNMINeHNHMzMzMzMzMJl4OHJiZmZmZmZlZrbZTFd7v5t/vhgG/99lj1u9iTszMzMzMzMyGnkccmJmZmZmZmVktBw7MzMzMzMzMrJYDB2ZmZmZmZmZWy4EDMzMzMzMzM6vlwIGZmZmZmZmZ1XLgwMzMzMzMzMxq+XaMZm2MHTt2SN9nZmZmZmY2knjEgZmZmZmZmZnVcuDAzMzMzMzMzGo5cGBmZmZmZmZmtRw4MDMzMzMzM7NaDhyYmZmZmZmZWS0HDszMzMzMzMyslm/HaGbWj1N3umPA793ljDW7mJOR7fY7FhrQ+9Za85ku58TMzMzMus0jDszMzMzMzMyslgMHZmZmZmZmZlbLgQMzMzMzMzMzq+XAgZmZmZmZmZnVcuDAzMzMzMzMzGo5cGBmZmZmZmZmtXw7RjOzCczz+90z4PfOfcxq7/09duzYAaczmPeamZmZ2YTFIw7MzMzMzMzMrJYDB2ZmZmZmZmZWy4EDMzMzMzMzM6vlwIGZmZmZmZmZ1XLgwMzMzMzMzMxqOXBgZmZmZmZmZrV8O0azIXL7HQsN+L1rrflMF3NiZhOypxZbfMDvXfzXT3UxJ2ZmZvZ+4REHZmZmZmZmZlbLgQMzMzMzMzMzq+XAgZmZmZmZmZnVcuDAzMzMzMzMzGo5cGBmZmZmZmZmtRw4MDMzMzMzM7Navh2j2fvY/PvdMKD3PXvM+l3OSY8PX/DhAb3vye2eHOexb1k3YZr9zscH/N4/f3zpruVjfBjovg3/u393y6k73TGg9+1yxppdzomZmZmNZB5xYGZmZmZmZma1HDgwMzMzMzMzs1oOHJiZmZmZmZlZLQcOzMzMzMzMzKyWAwdmZmZmZmZmVsuBAzMzMzMzMzOr5dsxmk1gJubb1ZnZhOm4LTYY8Hv3/sH1XcyJmZmZjQ8ecWBmZmZmZmZmtRw4MDMzMzMzM7NaDhyYmZmZmZmZWS0HDszMzMzMzMyslgMHZmZmZmZmZlbLgQMzMzMzMzMzq+XbMZqZmdlEZ+zYsV153+13LDTgPKy15jMDfm9/5t/vhgG979lj1u9yTszM7P3CIw7MzMzMzMzMrJYDB2ZmZmZmZmZWy4EDMzMzMzMzM6vlwIGZmZmZmZmZ1XLgwMzMzMzMzMxq+a4KZjZ4Y6cbxHv/1r18mBUDXXUe+lh5fqD7t/ftjj2/3z0Dfu/cx6zWxZy8T4zAc/dTiy0+oPct/uunxnl86k53DDgPu5yx5oDfOxQGeseQwb73/erDF3x4QO97crsnu5wTGw6z3/n4gN/7548v3bV8jAQecWBmZmZmZmZmtRw4MDMzMzMzM7NagwocRMR6EfF0RPwuIvbrVqbMzMzMzMzMbGQYcOAgIkYDpwKfBJYAtoqIJbqVMTMzMzMzMzMbfoMZcbA88LvM/H1mvgNcBmzUnWyZmZmZmZmZ2UgwmMDBXMBzlcfPl21mZmZmZmZmNpGIzBzYGyM2A9bLzB3K422BFTJz116v2xHYsTxcFHh64NkdkWYGXp0I0+lmWiMtnW6mNdLS6WZaE3Oe/NmGNq2Rlk430xpp6XQzrZGWTjfTGmnpdDOtkZZON9Maael0M62Rlk430xpp6XQzrZGWTjfTGmnpdDOtkZbOSDJfZs7S1xOTDCLRF4B5Ko/nLtvGkZlnAWcN4v+MaBHxcGYuN7GlMxLz5M/mPI20dEZinvzZJsw8+bNNmHnyZ5sw8+TPNmHmyZ9twsyTP9vEYzBTFR4CFo6IBSJiUmBL4NruZMvMzMzMzMzMRoIBjzjIzP9ExK7AzcBo4NzM/GXXcmZmZmZmZmZmw24wUxXIzBuBG7uUlwlVt6ZhjLR0upnWSEunm2mNtHS6mdbEnCd/tqFNa6Sl0820Rlo63UxrpKXTzbRGWjrdTGukpdPNtEZaOt1Ma6Sl0820Rlo63UxrpKXTzbRGWjrdTGukpTNBGPDiiGZmZmZmZmY28RvMGgdmZmZmZmZmNpFz4MDMzMzMzMzMajlwMEgREeM5fX9HQ2B8f4+D0a28dTGdEbdPjsTvbyTmqVtG2j45MXNZm9lI4fOIjQ/eryYcI64BMKHJ8bBIRETMHRGfiIgZMvPdsq3jg2p8NfBGYsOxCyaNiNERMcdgEomIeSJinYiYotf2gXx/k0D39rHMzCgGkJeu7JPlfZOU35MPJp3y3hlg/ByHgzXI8h5T+XtQF9Te5T3ItLpS3t3at1vnooiYarBpjCQRMXUXy3qybqRT0upGeY+4CmJETF35e8pu5rG1r48EI2lfH4n7Qbd1u7xHQpm1zi
[... base64-encoded image/png payloads (matplotlib figure outputs of the deleted data-analysis tutorial notebook) and their surrounding output metadata omitted ...]
" - ] - }, - "metadata": { - "needs_background": "light" - }, - "output_type": "display_data" - } - ], - "source": [ - "import numpy as np\n", - "import matplotlib.pyplot as plt\n", - "\n", - "# generate color map\n", - "cmap = plt.cm.tab10\n", - "colors = cmap(np.arange(len(df)) % cmap.N)\n", - "\n", - "# column barplot\n", - "columns = []\n", - "for colname, hist in stan.histograms.items():\n", - " fig, ax = plt.subplots(figsize=(18, 6))\n", - " \n", - " ax.bar(np.arange(len(hist['x'])), hist['y'], color=colors)\n", - " ax.set_xticks(np.arange(len(hist['x'])))\n", - " ax.set_xticklabels(hist['x'], rotation=60)\n", - " ax.set_title(f\"Histogram for column {colname}\")\n", - " \n", - " plt.show()" - ] - }, - { - "cell_type": "markdown", - "metadata": {}, - "source": [ - "This way, it is fairly easy to understand how imbalanced the target distribution might be, along with a quick pass to search for outliers, for example.\n", - "\n", - "# Final thoughts\n", - "\n", - "Lightwood automatically tries to leverage all the information provided by a `StatisticalAnalysis` instance when generating a predictor for any given dataset and problem definition. Additionally, it is a valuable tool to explore the data as a user. \n", - "\n", - "Finally, be aware that you can access these insights when creating custom blocks (e.g. encoders, mixers, or analyzers) if you want, you just need to pass whatever is necessary as arguments to these blocks inside the Json AI object.\n" - ] - }, - { - "cell_type": "code", - "execution_count": null, - "metadata": {}, - "outputs": [], - "source": [] - } - ], - "metadata": { - "kernelspec": { - "display_name": "mdb", - "language": "python", - "name": "mdb" - }, - "language_info": { - "codemirror_mode": { - "name": "ipython", - "version": 3 - }, - "file_extension": ".py", - "mimetype": "text/x-python", - "name": "python", - "nbconvert_exporter": "python", - "pygments_lexer": "ipython3", - "version": "3.8.6" - } - }, - "nbformat": 4, - "nbformat_minor": 4 -} diff --git a/docs/_sources/tutorials/tutorial_time_series/Tutorial - Training a time series predictor.ipynb.txt b/docs/_sources/tutorials/tutorial_time_series/Tutorial - Training a time series predictor.ipynb.txt deleted file mode 100644 index 17eb5d49c..000000000 --- a/docs/_sources/tutorials/tutorial_time_series/Tutorial - Training a time series predictor.ipynb.txt +++ /dev/null @@ -1,751 +0,0 @@ -{ - "cells": [ - { - "cell_type": "markdown", - "metadata": {}, - "source": [ - "# Tutorial - Time series forecasting\n", - "\n", - "## Introduction\n", - "\n", - "Time series are an ubiquitous type of data in all types of processes. Producing forecasts for them can be highly valuable in domains like retail or industrial manufacture, among many others.\n", - "\n", - "Lightwood supports time series forecasting (both univariate and multivariate inputs), handling many of the pain points commonly associated with setting up a manual time series predictive pipeline. \n", - "\n", - "In this tutorial, we will train a lightwood predictor and analyze its forecasts for the task of counting sunspots in monthly intervals.\n", - "\n", - "## Load data\n", - "\n", - "Let's begin by loading the dataset and looking at it:" - ] - }, - { - "cell_type": "code", - "execution_count": 5, - "metadata": {}, - "outputs": [ - { - "data": { - "text/html": [ - "
" - ], - "text/plain": [ - " Month Sunspots\n", - "0 1749-01 58.0\n", - "1 1749-02 62.6\n", - "2 1749-03 70.0\n", - "3 1749-04 55.7\n", - "4 1749-05 85.0\n", - "... ... ...\n", - "2815 1983-08 71.8\n", - "2816 1983-09 50.3\n", - "2817 1983-10 55.8\n", - "2818 1983-11 33.3\n", - "2819 1983-12 33.4\n", - "\n", - "[2820 rows x 2 columns]" - ] - }, - "execution_count": 5, - "metadata": {}, - "output_type": "execute_result" - } - ], - "source": [ - "import pandas as pd\n", - "\n", - "df = pd.read_csv(\"https://raw.githubusercontent.com/mindsdb/benchmarks/main/benchmarks/datasets/monthly_sunspots/data.csv\")\n", - "df" - ] - }, - { - "cell_type": "markdown", - "metadata": {}, - "source": [ - "This is a very simple dataset. It's got a single column that specifies the month in which the measurement was done, and then in the 'Sunspots' column we have the actual quantity we are interested in forecasting. As such, we can characterize this as a univariate time series problem.\n", - "\n", - "## Define the predictive task\n", - "\n", - "We will use Lightwood high level methods to state what we want to predict. As this is a time series task (because we want to leverage the notion of time to predict), we need to specify a set of arguments that will activate Lightwood's time series pipeline:" - ] - }, - { - "cell_type": "code", - "execution_count": 8, - "metadata": {}, - "outputs": [], - "source": [ - "from lightwood.api.high_level import ProblemDefinition" - ] - }, - { - "cell_type": "code", - "execution_count": 21, - "metadata": {}, - "outputs": [], - "source": [ - "tss = {'nr_predictions': 6, # the predictor will learn to forecast what the next semester counts will look like (6 data points at monthly intervals -> 6 months)\n", - " 'order_by': ['Month'], # what column is used to order the entire datset\n", - " 'window': 12 # how many past values to consider for emitting predictions\n", - " }\n", - "\n", - "pdef = ProblemDefinition.from_dict({'target': 'Sunspots', # specify the column to forecast\n", - " 'timeseries_settings': tss # pass along all time series specific parameters\n", - " })" - ] - }, - { - "cell_type": "markdown", - "metadata": {}, - "source": [ - "Now, let's do a very simple train-test split, leaving 10% of the data to check the forecasts that our predictor will produce:" - ] - }, - { - "cell_type": "code", - "execution_count": 22, - "metadata": {}, - "outputs": [ - { - "name": "stdout", - "output_type": "stream", - "text": [ - "(2538, 2) (282, 2)\n" - ] - } - ], - "source": [ - "cutoff = int(len(df)*0.9)\n", - "\n", - "train = df[:cutoff]\n", - "test = df[cutoff:]\n", - "\n", - "print(train.shape, test.shape)" - ] - }, - { - "cell_type": "markdown", - "metadata": {}, - "source": [ - "## Generate the predictor object\n", - "\n", - "Now, we can generate code for a machine learning model by using our problem definition and the data:" - ] - }, - { - "cell_type": "code", - "execution_count": 23, - "metadata": {}, - "outputs": [ - { - "name": "stderr", - "output_type": "stream", - "text": [ - "\u001b[32mINFO:lightwood-46866:Dropping features: []\u001b[0m\n", - "\u001b[32mINFO:lightwood-46866:Analyzing a sample of 2467\u001b[0m\n", - "\u001b[32mINFO:lightwood-46866:from a total population of 2820, this is equivalent to 87.5% of your data.\u001b[0m\n", - "\u001b[32mINFO:lightwood-46866:Using 15 processes to deduct types.\u001b[0m\n", - "\u001b[32mINFO:lightwood-46866:Starting statistical analysis\u001b[0m\n", - "\u001b[32mINFO:lightwood-46866:Finished statistical analysis\u001b[0m\n" - ] - }, - 
{ - "data": { - "text/plain": [ - "" - ] - }, - "execution_count": 23, - "metadata": {}, - "output_type": "execute_result" - } - ], - "source": [ - "from lightwood.api.high_level import (\n", - " json_ai_from_problem,\n", - " code_from_json_ai,\n", - " predictor_from_code\n", - ")\n", - "\n", - "json_ai = json_ai_from_problem(df, problem_definition=pdef)\n", - "code = code_from_json_ai(json_ai)\n", - "predictor = predictor_from_code(code)\n", - "\n", - "# uncomment this to see the generated code:\n", - "# print(code)" - ] - }, - { - "cell_type": "markdown", - "metadata": {}, - "source": [ - "## Train\n", - "\n", - "Okay, everything is ready now for our predictor to learn based on the training data we will provide.\n", - "\n", - "Internally, lightwood cleans and reshapes the data, featurizes measurements and timestamps, and comes up with a handful of different models that will be evaluated to keep the one that produces the best forecasts.\n", - "\n", - "Let's train the predictor. This should take a couple of minutes, at most:" - ] - }, - { - "cell_type": "code", - "execution_count": 27, - "metadata": {}, - "outputs": [ - { - "name": "stderr", - "output_type": "stream", - "text": [ - "\u001b[32mINFO:lightwood-46866:Dropping features: []\u001b[0m\n", - "\u001b[32mINFO:lightwood-46866:Performing statistical analysis on data\u001b[0m\n", - "\u001b[32mINFO:lightwood-46866:Starting statistical analysis\u001b[0m\n", - "\u001b[32mINFO:lightwood-46866:Finished statistical analysis\u001b[0m\n", - "\u001b[32mINFO:lightwood-46866:Cleaning the data\u001b[0m\n", - "\u001b[32mINFO:lightwood-46866:Transforming timeseries data\u001b[0m\n", - "\u001b[32mINFO:lightwood-46866:Using 15 processes to reshape.\u001b[0m\n", - "\u001b[32mINFO:lightwood-46866:Splitting the data into train/test\u001b[0m\n", - "\u001b[32mINFO:lightwood-46866:Preparing the encoders\u001b[0m\n", - "\u001b[32mINFO:lightwood-46866:Encoder prepping dict length of: 1\u001b[0m\n", - "\u001b[32mINFO:lightwood-46866:Done running for: Sunspots\u001b[0m\n", - "\u001b[32mINFO:lightwood-46866:time series encoder epoch [1/100000] average_loss = 0.020042672178201507\u001b[0m\n", - "\u001b[32mINFO:lightwood-46866:time series encoder epoch [2/100000] average_loss = 0.0077215013273975305\u001b[0m\n", - "\u001b[32mINFO:lightwood-46866:time series encoder epoch [3/100000] average_loss = 0.0064399814919421546\u001b[0m\n", - "\u001b[32mINFO:lightwood-46866:time series encoder epoch [4/100000] average_loss = 0.005441865690967493\u001b[0m\n", - "\u001b[32mINFO:lightwood-46866:time series encoder epoch [5/100000] average_loss = 0.005300704742732801\u001b[0m\n", - "\u001b[32mINFO:lightwood-46866:time series encoder epoch [6/100000] average_loss = 0.004992981385766414\u001b[0m\n", - "\u001b[32mINFO:lightwood-46866:time series encoder epoch [7/100000] average_loss = 0.00491229374157755\u001b[0m\n", - "\u001b[32mINFO:lightwood-46866:time series encoder epoch [8/100000] average_loss = 0.004856080601089879\u001b[0m\n", - "\u001b[32mINFO:lightwood-46866:time series encoder epoch [9/100000] average_loss = 0.004799575188703704\u001b[0m\n", - "\u001b[32mINFO:lightwood-46866:time series encoder epoch [10/100000] average_loss = 0.0047617426566910325\u001b[0m\n", - "\u001b[32mINFO:lightwood-46866:time series encoder epoch [11/100000] average_loss = 0.004732183615366618\u001b[0m\n", - "\u001b[32mINFO:lightwood-46866:time series encoder epoch [12/100000] average_loss = 0.004704843226232026\u001b[0m\n", - "\u001b[32mINFO:lightwood-46866:time series encoder epoch [13/100000] 
average_loss = 0.004697896095744351\u001b[0m\n", - "\u001b[32mINFO:lightwood-46866:time series encoder epoch [14/100000] average_loss = 0.004687661141679998\u001b[0m\n", - "\u001b[32mINFO:lightwood-46866:time series encoder epoch [15/100000] average_loss = 0.004655592012823674\u001b[0m\n", - "\u001b[32mINFO:lightwood-46866:time series encoder epoch [16/100000] average_loss = 0.004595928704529478\u001b[0m\n", - "\u001b[32mINFO:lightwood-46866:time series encoder epoch [17/100000] average_loss = 0.004568418233018173\u001b[0m\n", - "\u001b[32mINFO:lightwood-46866:time series encoder epoch [18/100000] average_loss = 0.004558674494425456\u001b[0m\n", - "\u001b[32mINFO:lightwood-46866:time series encoder epoch [19/100000] average_loss = 0.004570525518634863\u001b[0m\n", - "\u001b[32mINFO:lightwood-46866:time series encoder epoch [20/100000] average_loss = 0.004572713087525284\u001b[0m\n", - "\u001b[32mINFO:lightwood-46866:time series encoder epoch [21/100000] average_loss = 0.004563712864591364\u001b[0m\n", - "\u001b[32mINFO:lightwood-46866:time series encoder epoch [22/100000] average_loss = 0.004498099365778136\u001b[0m\n", - "\u001b[32mINFO:lightwood-46866:time series encoder epoch [23/100000] average_loss = 0.004449873953534846\u001b[0m\n", - "\u001b[32mINFO:lightwood-46866:time series encoder epoch [24/100000] average_loss = 0.004484773205037703\u001b[0m\n", - "\u001b[32mINFO:lightwood-46866:time series encoder epoch [25/100000] average_loss = 0.004398583738427413\u001b[0m\n", - "\u001b[32mINFO:lightwood-46866:time series encoder epoch [26/100000] average_loss = 0.004340721536100957\u001b[0m\n", - "\u001b[32mINFO:lightwood-46866:time series encoder epoch [27/100000] average_loss = 0.004394709227377908\u001b[0m\n", - "\u001b[32mINFO:lightwood-46866:time series encoder epoch [28/100000] average_loss = 0.004414253694969311\u001b[0m\n", - "\u001b[32mINFO:lightwood-46866:time series encoder epoch [29/100000] average_loss = 0.0043628366892797905\u001b[0m\n", - "\u001b[32mINFO:lightwood-46866:time series encoder epoch [30/100000] average_loss = 0.0042474141246394105\u001b[0m\n", - "\u001b[32mINFO:lightwood-46866:time series encoder epoch [31/100000] average_loss = 0.004357850760744329\u001b[0m\n", - "\u001b[32mINFO:lightwood-46866:time series encoder epoch [32/100000] average_loss = 0.004315985190240961\u001b[0m\n", - "\u001b[32mINFO:lightwood-46866:time series encoder epoch [33/100000] average_loss = 0.00410254764975163\u001b[0m\n", - "\u001b[32mINFO:lightwood-46866:time series encoder epoch [34/100000] average_loss = 0.004112129096399274\u001b[0m\n", - "\u001b[32mINFO:lightwood-46866:time series encoder epoch [35/100000] average_loss = 0.004205447932084401\u001b[0m\n", - "\u001b[32mINFO:lightwood-46866:time series encoder epoch [36/100000] average_loss = 0.004242659451668723\u001b[0m\n", - "\u001b[32mINFO:lightwood-46866:time series encoder epoch [37/100000] average_loss = 0.0042895584252842685\u001b[0m\n", - "\u001b[32mINFO:lightwood-46866:time series encoder epoch [38/100000] average_loss = 0.00440603481572971\u001b[0m\n", - "\u001b[32mINFO:lightwood-46866:time series encoder epoch [39/100000] average_loss = 0.004132882597153647\u001b[0m\n", - "\u001b[32mINFO:lightwood-46866:time series encoder epoch [40/100000] average_loss = 0.0040611259769975094\u001b[0m\n", - "\u001b[32mINFO:lightwood-46866:time series encoder epoch [41/100000] average_loss = 0.00396897013772998\u001b[0m\n", - "\u001b[32mINFO:lightwood-46866:time series encoder epoch [42/100000] average_loss = 
0.003915625183205856\u001b[0m\n", - "\u001b[32mINFO:lightwood-46866:time series encoder epoch [43/100000] average_loss = 0.003940282500626748\u001b[0m\n", - "\u001b[32mINFO:lightwood-46866:time series encoder epoch [44/100000] average_loss = 0.004178977953760247\u001b[0m\n", - "\u001b[32mINFO:lightwood-46866:Featurizing the data\u001b[0m\n", - "\u001b[32mINFO:lightwood-46866:Training the mixers\u001b[0m\n", - "/Users/Pato/Work/MindsDB/env/lib/python3.8/site-packages/lightgbm/engine.py:151: UserWarning: Found `num_iterations` in params. Will use it instead of argument\n", - " warnings.warn(\"Found `{}` in params. Will use it instead of argument\".format(alias))\n", - "\u001b[33mWARNING:lightwood-46866:LightGBM running on CPU, this somewhat slower than the GPU version, consider using a GPU instead\u001b[0m\n", - "\u001b[33mWARNING:lightwood-46866:LightGBM running on CPU, this somewhat slower than the GPU version, consider using a GPU instead\u001b[0m\n", - "\u001b[33mWARNING:lightwood-46866:LightGBM running on CPU, this somewhat slower than the GPU version, consider using a GPU instead\u001b[0m\n", - "\u001b[33mWARNING:lightwood-46866:LightGBM running on CPU, this somewhat slower than the GPU version, consider using a GPU instead\u001b[0m\n", - "\u001b[33mWARNING:lightwood-46866:LightGBM running on CPU, this somewhat slower than the GPU version, consider using a GPU instead\u001b[0m\n", - "\u001b[33mWARNING:lightwood-46866:LightGBM running on CPU, this somewhat slower than the GPU version, consider using a GPU instead\u001b[0m\n", - "/Users/Pato/Work/MindsDB/env/lib/python3.8/site-packages/torch/cuda/amp/grad_scaler.py:116: UserWarning: torch.cuda.amp.GradScaler is enabled, but CUDA is not available. Disabling.\n", - " warnings.warn(\"torch.cuda.amp.GradScaler is enabled, but CUDA is not available. 
Disabling.\")\n", - "/Users/Pato/Work/MindsDB/env/lib/python3.8/site-packages/pytorch_ranger/ranger.py:172: UserWarning: This overload of addcmul_ is deprecated:\n", - "\taddcmul_(Number value, Tensor tensor1, Tensor tensor2)\n", - "Consider using one of the following signatures instead:\n", - "\taddcmul_(Tensor tensor1, Tensor tensor2, *, Number value) (Triggered internally at ../torch/csrc/utils/python_arg_parser.cpp:1005.)\n", - " exp_avg_sq.mul_(beta2).addcmul_(1 - beta2, grad, grad)\n", - "\u001b[32mINFO:lightwood-46866:Loss of 0.539688229560852 with learning rate 0.0001\u001b[0m\n", - "\u001b[32mINFO:lightwood-46866:Loss of 0.7796856760978699 with learning rate 0.00014\u001b[0m\n", - "\u001b[32mINFO:lightwood-46866:Found learning rate of: 0.0001\u001b[0m\n", - "\u001b[37mDEBUG:lightwood-46866:Loss @ epoch 1: 0.6908893585205078\u001b[0m\n", - "\u001b[37mDEBUG:lightwood-46866:Loss @ epoch 2: 0.6882499903440475\u001b[0m\n", - "\u001b[37mDEBUG:lightwood-46866:Loss @ epoch 3: 0.6850549429655075\u001b[0m\n", - "\u001b[37mDEBUG:lightwood-46866:Loss @ epoch 4: 0.6813623607158661\u001b[0m\n", - "\u001b[37mDEBUG:lightwood-46866:Loss @ epoch 5: 0.6772531270980835\u001b[0m\n", - "\u001b[37mDEBUG:lightwood-46866:Loss @ epoch 6: 0.6728083938360214\u001b[0m\n", - "\u001b[37mDEBUG:lightwood-46866:Loss @ epoch 7: 0.6652606427669525\u001b[0m\n", - "\u001b[37mDEBUG:lightwood-46866:Loss @ epoch 8: 0.6601350754499435\u001b[0m\n", - "\u001b[37mDEBUG:lightwood-46866:Loss @ epoch 9: 0.6548376232385635\u001b[0m\n", - "\u001b[37mDEBUG:lightwood-46866:Loss @ epoch 10: 0.6494599282741547\u001b[0m\n", - "\u001b[37mDEBUG:lightwood-46866:Loss @ epoch 11: 0.6441417187452316\u001b[0m\n", - "\u001b[37mDEBUG:lightwood-46866:Loss @ epoch 12: 0.6389893442392349\u001b[0m\n", - "\u001b[37mDEBUG:lightwood-46866:Loss @ epoch 13: 0.6309126764535904\u001b[0m\n", - "\u001b[37mDEBUG:lightwood-46866:Loss @ epoch 14: 0.6257634907960892\u001b[0m\n" - ] - }, - { - "name": "stderr", - "output_type": "stream", - "text": [ - "\u001b[37mDEBUG:lightwood-46866:Loss @ epoch 15: 0.6205589026212692\u001b[0m\n", - "\u001b[37mDEBUG:lightwood-46866:Loss @ epoch 16: 0.6152833849191666\u001b[0m\n", - "\u001b[37mDEBUG:lightwood-46866:Loss @ epoch 17: 0.6099573820829391\u001b[0m\n", - "\u001b[37mDEBUG:lightwood-46866:Loss @ epoch 18: 0.6046575754880905\u001b[0m\n", - "\u001b[37mDEBUG:lightwood-46866:Loss @ epoch 19: 0.5962131917476654\u001b[0m\n", - "\u001b[37mDEBUG:lightwood-46866:Loss @ epoch 20: 0.5909084677696228\u001b[0m\n", - "\u001b[37mDEBUG:lightwood-46866:Loss @ epoch 21: 0.5856661349534988\u001b[0m\n", - "\u001b[37mDEBUG:lightwood-46866:Loss @ epoch 22: 0.5805662572383881\u001b[0m\n", - "\u001b[37mDEBUG:lightwood-46866:Loss @ epoch 23: 0.575617328286171\u001b[0m\n", - "\u001b[37mDEBUG:lightwood-46866:Loss @ epoch 24: 0.5707968175411224\u001b[0m\n", - "\u001b[37mDEBUG:lightwood-46866:Loss @ epoch 25: 0.5632813721895218\u001b[0m\n", - "\u001b[37mDEBUG:lightwood-46866:Loss @ epoch 26: 0.5587586611509323\u001b[0m\n", - "\u001b[37mDEBUG:lightwood-46866:Loss @ epoch 27: 0.554344117641449\u001b[0m\n", - "\u001b[37mDEBUG:lightwood-46866:Loss @ epoch 28: 0.5499386340379715\u001b[0m\n", - "\u001b[37mDEBUG:lightwood-46866:Loss @ epoch 29: 0.5455891937017441\u001b[0m\n", - "\u001b[37mDEBUG:lightwood-46866:Loss @ epoch 30: 0.5413248538970947\u001b[0m\n", - "\u001b[37mDEBUG:lightwood-46866:Loss @ epoch 31: 0.5345934927463531\u001b[0m\n", - "\u001b[37mDEBUG:lightwood-46866:Loss @ epoch 32: 0.5304456949234009\u001b[0m\n", - 
"\u001b[37mDEBUG:lightwood-46866:Loss @ epoch 33: 0.526373103260994\u001b[0m\n", - "\u001b[37mDEBUG:lightwood-46866:Loss @ epoch 34: 0.5223924517631531\u001b[0m\n", - "\u001b[37mDEBUG:lightwood-46866:Loss @ epoch 35: 0.5184392035007477\u001b[0m\n", - "\u001b[37mDEBUG:lightwood-46866:Loss @ epoch 36: 0.5145991444587708\u001b[0m\n", - "\u001b[37mDEBUG:lightwood-46866:Loss @ epoch 37: 0.5086493343114853\u001b[0m\n", - "\u001b[37mDEBUG:lightwood-46866:Loss @ epoch 38: 0.5050476491451263\u001b[0m\n", - "\u001b[37mDEBUG:lightwood-46866:Loss @ epoch 39: 0.5015637576580048\u001b[0m\n", - "\u001b[37mDEBUG:lightwood-46866:Loss @ epoch 40: 0.49815742671489716\u001b[0m\n", - "\u001b[37mDEBUG:lightwood-46866:Loss @ epoch 41: 0.4948585033416748\u001b[0m\n", - "\u001b[37mDEBUG:lightwood-46866:Loss @ epoch 42: 0.49173182249069214\u001b[0m\n", - "\u001b[37mDEBUG:lightwood-46866:Loss @ epoch 43: 0.48690974712371826\u001b[0m\n", - "\u001b[37mDEBUG:lightwood-46866:Loss @ epoch 44: 0.4839773178100586\u001b[0m\n", - "\u001b[37mDEBUG:lightwood-46866:Loss @ epoch 45: 0.4811210632324219\u001b[0m\n", - "\u001b[37mDEBUG:lightwood-46866:Loss @ epoch 46: 0.4783552885055542\u001b[0m\n", - "\u001b[37mDEBUG:lightwood-46866:Loss @ epoch 47: 0.4757150560617447\u001b[0m\n", - "\u001b[37mDEBUG:lightwood-46866:Loss @ epoch 48: 0.47318898141384125\u001b[0m\n", - "\u001b[37mDEBUG:lightwood-46866:Loss @ epoch 49: 0.46942955255508423\u001b[0m\n", - "\u001b[37mDEBUG:lightwood-46866:Loss @ epoch 50: 0.4671967923641205\u001b[0m\n", - "\u001b[37mDEBUG:lightwood-46866:Loss @ epoch 51: 0.4650762975215912\u001b[0m\n", - "\u001b[37mDEBUG:lightwood-46866:Loss @ epoch 52: 0.4630257934331894\u001b[0m\n", - "\u001b[37mDEBUG:lightwood-46866:Loss @ epoch 53: 0.46110378205776215\u001b[0m\n", - "\u001b[37mDEBUG:lightwood-46866:Loss @ epoch 54: 0.45930930972099304\u001b[0m\n", - "\u001b[37mDEBUG:lightwood-46866:Loss @ epoch 55: 0.45666399598121643\u001b[0m\n", - "\u001b[37mDEBUG:lightwood-46866:Loss @ epoch 56: 0.4550795406103134\u001b[0m\n", - "\u001b[37mDEBUG:lightwood-46866:Loss @ epoch 57: 0.4535674601793289\u001b[0m\n", - "\u001b[37mDEBUG:lightwood-46866:Loss @ epoch 58: 0.45216208696365356\u001b[0m\n", - "\u001b[37mDEBUG:lightwood-46866:Loss @ epoch 59: 0.45088090002536774\u001b[0m\n", - "\u001b[37mDEBUG:lightwood-46866:Loss @ epoch 60: 0.4496418982744217\u001b[0m\n", - "\u001b[37mDEBUG:lightwood-46866:Loss @ epoch 61: 0.4477883279323578\u001b[0m\n", - "\u001b[37mDEBUG:lightwood-46866:Loss @ epoch 62: 0.4467353969812393\u001b[0m\n", - "\u001b[37mDEBUG:lightwood-46866:Loss @ epoch 63: 0.4457828402519226\u001b[0m\n", - "\u001b[37mDEBUG:lightwood-46866:Loss @ epoch 64: 0.4448719322681427\u001b[0m\n", - "\u001b[37mDEBUG:lightwood-46866:Loss @ epoch 65: 0.44403648376464844\u001b[0m\n", - "\u001b[37mDEBUG:lightwood-46866:Loss @ epoch 66: 0.44328153133392334\u001b[0m\n", - "\u001b[37mDEBUG:lightwood-46866:Loss @ epoch 67: 0.44207488000392914\u001b[0m\n", - "\u001b[37mDEBUG:lightwood-46866:Loss @ epoch 68: 0.4413738548755646\u001b[0m\n", - "\u001b[37mDEBUG:lightwood-46866:Loss @ epoch 69: 0.44084450602531433\u001b[0m\n", - "\u001b[37mDEBUG:lightwood-46866:Loss @ epoch 70: 0.4403578191995621\u001b[0m\n", - "\u001b[37mDEBUG:lightwood-46866:Loss @ epoch 71: 0.4398685395717621\u001b[0m\n", - "\u001b[37mDEBUG:lightwood-46866:Loss @ epoch 72: 0.43935835361480713\u001b[0m\n", - "\u001b[37mDEBUG:lightwood-46866:Loss @ epoch 73: 0.43840254843235016\u001b[0m\n", - "\u001b[37mDEBUG:lightwood-46866:Loss @ epoch 74: 0.4378361850976944\u001b[0m\n", - 
"\u001b[37mDEBUG:lightwood-46866:Loss @ epoch 75: 0.4375789165496826\u001b[0m\n", - "\u001b[37mDEBUG:lightwood-46866:Loss @ epoch 76: 0.43739429116249084\u001b[0m\n", - "\u001b[37mDEBUG:lightwood-46866:Loss @ epoch 77: 0.4372607320547104\u001b[0m\n", - "\u001b[37mDEBUG:lightwood-46866:Loss @ epoch 78: 0.43708017468452454\u001b[0m\n", - "\u001b[37mDEBUG:lightwood-46866:Loss @ epoch 79: 0.4364318400621414\u001b[0m\n", - "\u001b[37mDEBUG:lightwood-46866:Loss @ epoch 80: 0.43584632873535156\u001b[0m\n", - "\u001b[37mDEBUG:lightwood-46866:Loss @ epoch 81: 0.4356466382741928\u001b[0m\n", - "\u001b[37mDEBUG:lightwood-46866:Loss @ epoch 82: 0.4355204701423645\u001b[0m\n", - "\u001b[37mDEBUG:lightwood-46866:Loss @ epoch 83: 0.43557313084602356\u001b[0m\n", - "\u001b[37mDEBUG:lightwood-46866:Loss @ epoch 84: 0.43554021418094635\u001b[0m\n", - "\u001b[37mDEBUG:lightwood-46866:Loss @ epoch 85: 0.43514105677604675\u001b[0m\n", - "\u001b[37mDEBUG:lightwood-46866:Loss @ epoch 86: 0.43462760746479034\u001b[0m\n", - "\u001b[37mDEBUG:lightwood-46866:Loss @ epoch 87: 0.43442972004413605\u001b[0m\n", - "\u001b[37mDEBUG:lightwood-46866:Loss @ epoch 88: 0.43443459272384644\u001b[0m\n", - "\u001b[37mDEBUG:lightwood-46866:Loss @ epoch 89: 0.4344787895679474\u001b[0m\n", - "\u001b[37mDEBUG:lightwood-46866:Loss @ epoch 90: 0.4345344454050064\u001b[0m\n", - "\u001b[37mDEBUG:lightwood-46866:Loss @ epoch 1: 0.329136921600862\u001b[0m\n", - "\u001b[37mDEBUG:lightwood-46866:Loss @ epoch 2: 0.3284675722772425\u001b[0m\n", - "\u001b[37mDEBUG:lightwood-46866:Loss @ epoch 3: 0.33007449995387683\u001b[0m\n", - "\u001b[37mDEBUG:lightwood-46866:Loss @ epoch 4: 0.32765168764374475\u001b[0m\n", - "\u001b[37mDEBUG:lightwood-46866:Loss @ epoch 5: 0.3260806582190774\u001b[0m\n", - "\u001b[37mDEBUG:lightwood-46866:Loss @ epoch 6: 0.3272357068278573\u001b[0m\n", - "\u001b[37mDEBUG:lightwood-46866:Loss @ epoch 7: 0.3281749730760401\u001b[0m\n", - "\u001b[32mINFO:lightwood-46866:Started fitting LGBM models for array prediction\u001b[0m\n", - "\u001b[32mINFO:lightwood-46866:Started fitting LGBM model\u001b[0m\n", - "/Users/Pato/Work/MindsDB/env/lib/python3.8/site-packages/lightgbm/engine.py:151: UserWarning: Found `num_iterations` in params. Will use it instead of argument\n", - " warnings.warn(\"Found `{}` in params. Will use it instead of argument\".format(alias))\n", - "\u001b[32mINFO:lightwood-46866:A single GBM iteration takes 0.1 seconds\u001b[0m\n", - "\u001b[32mINFO:lightwood-46866:Training GBM () with 1325 iterations given 165.66666666666666 seconds constraint\u001b[0m\n", - "/Users/Pato/Work/MindsDB/env/lib/python3.8/site-packages/lightgbm/engine.py:156: UserWarning: Found `early_stopping_rounds` in params. Will use it instead of argument\n", - " warnings.warn(\"Found `{}` in params. 
Will use it instead of argument\".format(alias))\n", - "\u001b[32mINFO:lightwood-46866:Lightgbm model contains 1 weak estimators\u001b[0m\n", - "\u001b[32mINFO:lightwood-46866:Updating lightgbm model with 1 iterations\u001b[0m\n", - "\u001b[32mINFO:lightwood-46866:Model now has a total of 2 weak estimators\u001b[0m\n", - "\u001b[32mINFO:lightwood-46866:Started fitting LGBM model\u001b[0m\n", - "\u001b[32mINFO:lightwood-46866:A single GBM iteration takes 0.1 seconds\u001b[0m\n", - "\u001b[32mINFO:lightwood-46866:Training GBM () with 1325 iterations given 165.66666666666666 seconds constraint\u001b[0m\n", - "\u001b[32mINFO:lightwood-46866:Lightgbm model contains 1 weak estimators\u001b[0m\n", - "\u001b[32mINFO:lightwood-46866:Updating lightgbm model with 1 iterations\u001b[0m\n", - "\u001b[32mINFO:lightwood-46866:Model now has a total of 2 weak estimators\u001b[0m\n", - "\u001b[32mINFO:lightwood-46866:Started fitting LGBM model\u001b[0m\n", - "\u001b[32mINFO:lightwood-46866:A single GBM iteration takes 0.1 seconds\u001b[0m\n", - "\u001b[32mINFO:lightwood-46866:Training GBM () with 1325 iterations given 165.66666666666666 seconds constraint\u001b[0m\n", - "\u001b[32mINFO:lightwood-46866:Lightgbm model contains 1 weak estimators\u001b[0m\n", - "\u001b[32mINFO:lightwood-46866:Updating lightgbm model with 1 iterations\u001b[0m\n", - "\u001b[32mINFO:lightwood-46866:Model now has a total of 2 weak estimators\u001b[0m\n", - "\u001b[32mINFO:lightwood-46866:Started fitting LGBM model\u001b[0m\n", - "\u001b[32mINFO:lightwood-46866:A single GBM iteration takes 0.1 seconds\u001b[0m\n", - "\u001b[32mINFO:lightwood-46866:Training GBM () with 1325 iterations given 165.66666666666666 seconds constraint\u001b[0m\n", - "\u001b[32mINFO:lightwood-46866:Lightgbm model contains 1 weak estimators\u001b[0m\n" - ] - }, - { - "name": "stderr", - "output_type": "stream", - "text": [ - "\u001b[32mINFO:lightwood-46866:Updating lightgbm model with 1 iterations\u001b[0m\n", - "\u001b[32mINFO:lightwood-46866:Model now has a total of 2 weak estimators\u001b[0m\n", - "\u001b[32mINFO:lightwood-46866:Started fitting LGBM model\u001b[0m\n", - "\u001b[32mINFO:lightwood-46866:A single GBM iteration takes 0.1 seconds\u001b[0m\n", - "\u001b[32mINFO:lightwood-46866:Training GBM () with 1325 iterations given 165.66666666666666 seconds constraint\u001b[0m\n", - "\u001b[32mINFO:lightwood-46866:Lightgbm model contains 1 weak estimators\u001b[0m\n", - "\u001b[32mINFO:lightwood-46866:Updating lightgbm model with 1 iterations\u001b[0m\n", - "\u001b[32mINFO:lightwood-46866:Model now has a total of 2 weak estimators\u001b[0m\n", - "\u001b[32mINFO:lightwood-46866:Started fitting LGBM model\u001b[0m\n", - "\u001b[32mINFO:lightwood-46866:A single GBM iteration takes 0.1 seconds\u001b[0m\n", - "\u001b[32mINFO:lightwood-46866:Training GBM () with 1325 iterations given 165.66666666666666 seconds constraint\u001b[0m\n", - "\u001b[32mINFO:lightwood-46866:Lightgbm model contains 1 weak estimators\u001b[0m\n", - "\u001b[32mINFO:lightwood-46866:Updating lightgbm model with 1 iterations\u001b[0m\n", - "\u001b[32mINFO:lightwood-46866:Model now has a total of 2 weak estimators\u001b[0m\n", - "\u001b[32mINFO:lightwood-46866:Ensembling the mixer\u001b[0m\n", - "\u001b[32mINFO:lightwood-46866:Mixer: Neural got accuracy: 0.19612012470445245\u001b[0m\n", - "\u001b[32mINFO:lightwood-46866:Mixer: LightGBMArray got accuracy: 0.21013741093675975\u001b[0m\n", - "\u001b[32mINFO:lightwood-46866:Picked best mixer: LightGBMArray\u001b[0m\n", - 
"\u001b[32mINFO:lightwood-46866:Analyzing the ensemble of mixers\u001b[0m\n", - "\u001b[32mINFO:lightwood-46866:Adjustment on validation requested.\u001b[0m\n", - "\u001b[32mINFO:lightwood-46866:Updating the mixers\u001b[0m\n", - "/Users/Pato/Work/MindsDB/env/lib/python3.8/site-packages/torch/cuda/amp/grad_scaler.py:116: UserWarning: torch.cuda.amp.GradScaler is enabled, but CUDA is not available. Disabling.\n", - " warnings.warn(\"torch.cuda.amp.GradScaler is enabled, but CUDA is not available. Disabling.\")\n", - "\u001b[37mDEBUG:lightwood-46866:Loss @ epoch 1: 0.33339183280865353\u001b[0m\n", - "\u001b[37mDEBUG:lightwood-46866:Loss @ epoch 2: 0.3303144524494807\u001b[0m\n", - "\u001b[37mDEBUG:lightwood-46866:Loss @ epoch 3: 0.330986554423968\u001b[0m\n", - "\u001b[37mDEBUG:lightwood-46866:Loss @ epoch 4: 0.3315189927816391\u001b[0m\n", - "\u001b[37mDEBUG:lightwood-46866:Loss @ epoch 5: 0.33072087665398914\u001b[0m\n", - "\u001b[37mDEBUG:lightwood-46866:Loss @ epoch 6: 0.33309372514486313\u001b[0m\n", - "\u001b[32mINFO:lightwood-46866:Updating array of LGBM models...\u001b[0m\n", - "\u001b[32mINFO:lightwood-46866:Updating lightgbm model with 1 iterations\u001b[0m\n", - "/Users/Pato/Work/MindsDB/env/lib/python3.8/site-packages/lightgbm/engine.py:151: UserWarning: Found `num_iterations` in params. Will use it instead of argument\n", - " warnings.warn(\"Found `{}` in params. Will use it instead of argument\".format(alias))\n", - "/Users/Pato/Work/MindsDB/env/lib/python3.8/site-packages/lightgbm/engine.py:156: UserWarning: Found `early_stopping_rounds` in params. Will use it instead of argument\n", - " warnings.warn(\"Found `{}` in params. Will use it instead of argument\".format(alias))\n", - "\u001b[32mINFO:lightwood-46866:Model now has a total of 3 weak estimators\u001b[0m\n", - "\u001b[32mINFO:lightwood-46866:Updating lightgbm model with 1 iterations\u001b[0m\n", - "\u001b[32mINFO:lightwood-46866:Model now has a total of 3 weak estimators\u001b[0m\n", - "\u001b[32mINFO:lightwood-46866:Updating lightgbm model with 1 iterations\u001b[0m\n", - "\u001b[32mINFO:lightwood-46866:Model now has a total of 3 weak estimators\u001b[0m\n", - "\u001b[32mINFO:lightwood-46866:Updating lightgbm model with 1 iterations\u001b[0m\n", - "\u001b[32mINFO:lightwood-46866:Model now has a total of 3 weak estimators\u001b[0m\n", - "\u001b[32mINFO:lightwood-46866:Updating lightgbm model with 1 iterations\u001b[0m\n", - "\u001b[32mINFO:lightwood-46866:Model now has a total of 3 weak estimators\u001b[0m\n", - "\u001b[32mINFO:lightwood-46866:Updating lightgbm model with 1 iterations\u001b[0m\n", - "\u001b[32mINFO:lightwood-46866:Model now has a total of 3 weak estimators\u001b[0m\n" - ] - } - ], - "source": [ - "predictor.learn(train)" - ] - }, - { - "cell_type": "markdown", - "metadata": {}, - "source": [ - "## Predict\n", - "\n", - "Once the predictor has trained, we can use it to generate 6-month forecasts for each of the test set data points:" - ] - }, - { - "cell_type": "code", - "execution_count": 28, - "metadata": {}, - "outputs": [ - { - "name": "stderr", - "output_type": "stream", - "text": [ - "\u001b[32mINFO:lightwood-46866:Dropping features: []\u001b[0m\n", - "\u001b[32mINFO:lightwood-46866:Cleaning the data\u001b[0m\n", - "\u001b[32mINFO:lightwood-46866:Transforming timeseries data\u001b[0m\n", - "\u001b[32mINFO:lightwood-46866:Featurizing the data\u001b[0m\n", - "/Users/Pato/Work/MindsDB/env/lib/python3.8/site-packages/pandas/core/indexing.py:1637: SettingWithCopyWarning: \n", - "A value is trying to be 
set on a copy of a slice from a DataFrame\n", - "\n", - "See the caveats in the documentation: https://pandas.pydata.org/pandas-docs/stable/user_guide/indexing.html#returning-a-view-versus-a-copy\n", - " self._setitem_single_block(indexer, value, name)\n", - "\u001b[32mINFO:lightwood-46866:AccStats.explain() has not been implemented, no modifications will be done to the data insights.\u001b[0m\n", - "\u001b[32mINFO:lightwood-46866:GlobalFeatureImportance.explain() has not been implemented, no modifications will be done to the data insights.\u001b[0m\n" - ] - } - ], - "source": [ - "forecasts = predictor.predict(test)" - ] - }, - { - "cell_type": "markdown", - "metadata": {}, - "source": [ - "Let's check how a single row might look:" - ] - }, - { - "cell_type": "code", - "execution_count": 37, - "metadata": {}, - "outputs": [ - { - "data": { - "text/html": [ - "
\n", - "\n", - "\n", - " \n", - " \n", - " \n", - " \n", - " \n", - " \n", - " \n", - " \n", - " \n", - " \n", - " \n", - " \n", - " \n", - " \n", - " \n", - " \n", - " \n", - " \n", - " \n", - " \n", - " \n", - " \n", - " \n", - " \n", - "
predictiontruthorder_Monthconfidencelowerupperanomaly
10[51.28799878891615, 46.76867159945164, 52.0899...51.0[-272577600.0, -269899200.0, -267220800.0, -26...[0.24, 0.24, 0.24, 0.24, 0.24, 0.24][30.80746268275371, 26.288135493289204, 31.609...[71.76853489507859, 67.24920770561408, 72.5704...False
\n", - "
" - ], - "text/plain": [ - " prediction truth \\\n", - "10 [51.28799878891615, 46.76867159945164, 52.0899... 51.0 \n", - "\n", - " order_Month \\\n", - "10 [-272577600.0, -269899200.0, -267220800.0, -26... \n", - "\n", - " confidence \\\n", - "10 [0.24, 0.24, 0.24, 0.24, 0.24, 0.24] \n", - "\n", - " lower \\\n", - "10 [30.80746268275371, 26.288135493289204, 31.609... \n", - "\n", - " upper anomaly \n", - "10 [71.76853489507859, 67.24920770561408, 72.5704... False " - ] - }, - "execution_count": 37, - "metadata": {}, - "output_type": "execute_result" - } - ], - "source": [ - "forecasts.iloc[[10]]" - ] - }, - { - "cell_type": "markdown", - "metadata": {}, - "source": [ - "You'll note that the point `prediction` has associated `lower` and `upper` bounds that are a function of the estimated `confidence` the model has on its own output. Apart from this, `order_Month` yields the timestamps of each prediction, `truth` lets us know what is the one-step-ahead observed value (if it exists at all). Finally, the `anomaly` tag will let you know if the observed value falls outside of the predicted region. \n", - "\n", - "\n", - "## Visualizing a forecast\n", - "\n", - "Okay, time series are much easier to appreciate through plots. Let's make one:\n", - "\n", - "NOTE: We will use `matplotlib` to generate a simple plot of these forecasts. If you want to run this notebook locally, you will need to `pip install matplotlib` for the following code to work." - ] - }, - { - "cell_type": "code", - "execution_count": 38, - "metadata": {}, - "outputs": [], - "source": [ - "import matplotlib.pyplot as plt" - ] - }, - { - "cell_type": "code", - "execution_count": 69, - "metadata": {}, - "outputs": [ - { - "data": { - "image/png": "iVBORw0KGgoAAAANSUhEUgAAAtQAAAHwCAYAAACG+PhNAAAAOXRFWHRTb2Z0d2FyZQBNYXRwbG90bGliIHZlcnNpb24zLjMuMywgaHR0cHM6Ly9tYXRwbG90bGliLm9yZy/Il7ecAAAACXBIWXMAAAsTAAALEwEAmpwYAACIoklEQVR4nOzdd3xV9f3H8dc3i7BnSIAEwp6BAGEHmQICClWh4BZFQX9VW6tttdraWtvaVltrFUQRR0HBBcoQAigbZO8RCCPMkLAJZH1/f+SGBkwg4957bpL38/G4D5Jzzznf972HJJ+cfIex1iIiIiIiIkXj53QAEREREZGSTAW1iIiIiEgxqKAWERERESkGFdQiIiIiIsWgglpEREREpBhUUIuIiIiIFIMKahHxCmPMA8aYZU7ncJLJ9r4x5pQxZo3TeZzijfdB/99ExJtUUIt4mTFmvzEm1RhzPtejrtO5bsQY850x5mGnczjJDUVaLHAzEG6t7eymWI4wxvzeGPNxEQ936/tgjIk0xlhjTEBxz+VNxpgpxpiXnc5RVPqeIPI/KqhFnHGrtbZSrseRwhxc0goHuaIBsN9ae8HpIA4r8vug//ulhzHG3+kMIu6iglrERxhjyhlj/mmMOeJ6/NMYU871XG9jTKIx5lfGmGPA+8YYP2PMr40xe40xycaY6caYGrnOF2uMWWGMOW2MOWSMecC1fYgxZoMx5qxr++9zHRNsjPnYdb7TxpgfjDGhxpg/AT2BN1131N907d/CGLPAGJNijNlljBmZ61w1jTGzXO2sARrf4PXPMMYcM8acMcYsMca0zvXcFGPMW8aYua72lxtjwlzv0SljzE5jTPtc+7d03T07bYzZZoy5LddzV91Vu/aus+tO5zhjzB7X8f9xdVFoCUwAurkynM7nddR1ve4UY0y8MWasa/tDwLu5jn8pj2ObGGO+d70HJ40xn7q2/+gObO7XkfMajDF/d70fCcaYW655jfuMMedcz92da/tyY8ybrjZ3GmP6FeC1DAKeA37qei2brtfONa8xz/fBGDPW1UaKq826uY6xxpjHjTF7gD15vO1LXP+edp2zW65j83tPqhpj3jPGHDXGHDbGvGzyKfBM9t346caYD12vbZsxJuaa9+lzY0ySq50nXNtrmOyv21tdn1dyvcb7jDGPAHcDz7oyf51Hu8YY87ox5oTJ/jraYoxp43qunOu1HTTGHDfGTDDGlHc9l/P94lnXsUeNMcONMYONMbtd7/FzudrJ93uJce/3hCnGmLeNMXOMMReAPnm93yIlkrVWDz308OID2A/0z2P7H4BVQG0gBFgB/NH1XG8gA/grUA4oDzzp2j/ctW0iMM21fwPgHDAaCARqAtG5zhVF9i/UbYHjwHDXc48CXwMVAH+gI1DF9dx3wMO58lYEDgEPAgFAe+Ak0Mr1/CfAdNd+bYDDwLLrvC9jgMqu1/JPYGOu56a4zt0RCAYWAQnAfa6cLwOLXfsGAvFkF3xBQF/Xe9E8n9fxQO5cgAW+AaoB9YEkYFBe++bzOpYAb7lyRruO71uQ44FpwPOuaxMMxLq2R7pyBeTa98rrcJ03HRjrej/GA0cA43r/z+Z6/XWA1rmOywB+7nrffgqcAWoU4LX8Hvj4mv8PebaTx+u89j3v67q+HVzX/9/AkmuuyQKgBlA+j/Pl9f7k+564nv+S7K+ZimR/za0BHs0n7++BS8Bg17n+DKxyPecHrANeJPv/WyNgHzDQ9fwA4JirjUnAZ9f8v375Ov8fBrrOXc11LVsCdVzPvQ7Mcr0nlcn+uv3zNd8vXnRd17GuazfVtW9rI
BVo6Nr/et9L3Pk9YQrZ/796uN63YKe/H+uhh7sejgfQQ4+y9iC7oD4PnHY9vnJt3wsMzrXfQLL/LJ7zAzIt9w8gYAfQL9fndVwFRADwG+DLAub5J/C66+MxZBfybfPY79ofnj8Fll6zz0Tgd64fvOlAi1zPvcINitFc+1Yju0Cq6vp8CjAp1/M/A3bk+jwKOO36uCfZBYxfruenAb/P53U8wI8L6thcn08Hfp3XvnnkjgAygcq5tv0ZmFLA4z8E3iG7b3Hu7ZHcuKCOz/VcBdf+YWQXOaeBO7imGHUdd6XIdG1bA9xbgNfye35cUOfZTh6v89r3/D3g1VyfV3L9/4nMdU36Xud8eb0/13tPQoHLuXOS/cvn4nzO/3sgLtfnrYBU18ddgIPX7P8b4P1cn/8b2EL2L5U1c22fwvUL6r7AbqArV/9/NsAFoHGubd2ABNfHvckumP1dn1d2vfYuufZfx/9+kb7e9xK3fE/I9Xo/vNHXvx56lMSHunyIOGO4tbaa6zHcta0ucCDXPgdc23IkWWsv5fq8AfCl68+wp8n+oZhJdrEQQXaB/iPGmC7GmMWuP0+fAcYBtVxPfwR8C3xisrudvGqMCcznNTQAuuS078pwN9kFSwjZP4wPXfN68mSM8TfG/MX1J+ezZP/SQa5ckH0nPUdqHp9Xcn1cFzhkrc26pu16+bWfh2O5Pr6Y69w3UhdIsdaeK2Lbz5JdLK1xdSsYU8DjIFdma+1F14eVbHY/5Z+SfZ2PGmNmG2Na5DrusLXWXpO3bmFfSwHauZ6r/u9ba88Dyde0dejagwogz/eE7P+7ga6cOf93J5J9F/mG5yL7/0Swye6C0wCoe83XwXNkfx3meIfsv9JMsdYmFzS8tXYR8CbwH+CEMeYdY0wVsr++KgDrcrU5z7U9R7K1NtP1carr3/y+Zq73vcRd3xNyFOU6ivg8FdQivuMI2T+QctR3bcthr96dQ8AtuQrzatbaYGvtYddz+fVZnkr2n4ojrLVVye4XbACstenW2pesta2A7sBQsrtV5Nf+99e0X8laO57sPy9nkF3Y5349+bkLGAb0B6qSfceRnFyFdASIMMbk/v5Wn+y7g5B9Z69Crudy/7C/kWvfg7zarmGMqZxP29c/ubXHrLVjrbV1yf5T+1vGmCauzFDE3Nbab621N5N953En2V0PctQzxuR+n3P+393otfzovbhBO9dz1f99Y0xFsrsp5X7frvfe3+i6XOsQ2Xeoa+X6v1vFWtv6Rgfmc66Ea74OKltrB8OVgXfvkP3Xh8dc17PAua21b1hrO5J9V7wZ8AzZ3ShSye5Sk9NmVWttQX/xy+s15Pm9xI3fEwr8mkVKIhXUIr5jGvBbY0yIMaYW2f0frzct2QTgT8aYBgCu44a5nvsv0N8YM9IYE2CyBwhGu56rTPadx0vGmM5kF7O4ztHHGBPlKgLOkv1n35w7vcfJ7h+a4xugmTHmXmNMoOvRyRjT0nVn7Avg98aYCsaYVsD913ktlckucJLJLhpfud4bdQOryb6D+KwrU2/gVrL7dANsBG535WoCPFSIcx8Hwo0xQXk9aa09RPafx//sGszV1nX+Ak0vZ4wZYYwJd316iuziI8tam0R2cXmP627+GG4wyDPXOUONMcNcReplsrsb5b57Xxt4wvVejSC7n+6cAryW40Bkzi8uBWjneqYBDxpjok32QNxXgNXW2v0FPD7J1VajG+0IYK09CswH/mGMqeIalNfYGNOrgO3ltgY4Z7IHDJd3XZ82xphOruefI/s6jgH+Bnxo/jf48dqvqau4vp66uO4IXyC7H3eW668vk4DXjTG1XfvWM8YMLEJ+uM73End9TyhiLpESQwW1iO94GVgLbCa7v+V617b8/IvsO83zjTHnyB5U1AXAWnuQ7AFUTwMpZBeR7VzHPQb8wXXMi2T3Ec4RBnxG9g/OHcD3ZP/JN6e9O032jAlvuLoCDABGkX2H8Rj/GzQJ8H9k/0n5GNl9J9+/zmv5kOw/+R8GtrteS5FYa9PILqBvIftO3lvAfdbana5dXie7P/px4AOyf/koqEXANuCYMeZkPvuMJvsO+xGyB779zlobV8DzdwJWG2POk31tn7TW7nM9N5bsu5PJZA8qW1HAc/oBv3DlSQF6kT1AL8dqoCnZ79WfgDtzdUu43muZ4fo32RizvgDt5Mt1zheAz4GjZP+yMKqAry+nO8efgOWurgZdC3DYfWQPItxO9i8vn5F9Z71QXL88DiV70GYC2e/ju0BVY0xHst+T+1z7/ZXs4vrXrsPfA1q5Mn+Vx+mrkF04nyL76yOZ7KIc4FdkD75d5eomFQc0L2x+l3y/l+De7wkipVbOaGcRESljTPZUig9ba2OdziIiUpLpDrWIiIiISDGooBYRERERKQZ1+RARERERKQbdoRYRERERKQYV1CIiIiIixRDgdIDiqFWrlo2MjHQ6hoiIiIiUcuvWrTtprQ3J67kSXVBHRkaydu1ap2OIiIiISClnjDmQ33Pq8iEiIiIiUgwqqEVEREREikEFtYiIiIhIMZToPtQiIiIiviI9PZ3ExEQuXbrkdBQphuDgYMLDwwkMDCzwMSqoRURERNwgMTGRypUrExkZiTHG6ThSBNZakpOTSUxMpGHDhgU+Tl0+RERERNzg0qVL1KxZU8V0CWaMoWbNmoX+K4MKahERERE3UTFd8hXlGqqgFhERESnF9u/fT5s2bZyO8SO9e/d2+3oia9eu5YknnnDrOQtCfahFREREpFAyMjIICPCtMjIjI4OYmBhiYmK83rbuUIuIiIiUEq+99hpt2rShTZs2/POf/7yyPSMjg7vvvpuWLVty5513cvHiRQB+/etf06pVK9q2bcsvf/lLAJKSkrjjjjvo1KkTnTp1Yvny5QD8/ve/595776VHjx7ce++9dO3alW3btl1pI+eO84ULFxgzZgydO3emffv2zJw5E4DU1FRGjRpFy5Yt+clPfkJqamqer6E4mb777juGDh0KkG+Obdu20blzZ6Kjo2nbti179uwp9vvuW79aiIiIiJQCT817io3HNrr1nNFh0fxz0D/zfX7dunW8//77rF69GmstXbp0oVevXlSvXp1du3bx3nvv0aNHD8aMGcNbb73Fgw8+yJdffsnOnTsxxnD69GkAnnzySX7+858TGxvLwYMHGThwIDt27ABg+/btLFu2jPLly/P6668zffp0XnrpJY4ePcrRo0eJiYnhueeeo2/fvkyePJnTp0/TuXNn+vfvz8SJE6lQoQI7duxg8+bNdOjQ4UevITk5uViZvvvuuyvn+tOf/pRnjgkTJvDkk09y9913k5aWRmZmZrGvjQpqERERkVJg2bJl/OQnP6FixYoA3H777SxdupTbbruNiIgIevToAcA999zDG2+8wVNPPUVwcDAPPfQQQ4cOvXJnNy4uju3bt18579mzZzl//jwAt912G+XLlwdg5MiRDBgwgJdeeonp06dz5513AjB//nxmzZrF3//+dyB79pODBw+yZMmSK/2b27ZtS9u2bX/0GqpWrVqsTLnll6Nb
t2786U9/IjExkdtvv52mTZsW6f3OTQW1iIiIiJtd706yE66ducIYQ0BAAGvWrGHhwoV89tlnvPnmmyxatIisrCxWrVpFcHDwj86TU6wD1KtXj5o1a7J582Y+/fRTJkyYAGTP5fz555/TvHnzQucsbqbc8svRsmVLunTpwuzZsxk8eDATJ06kb9++hc6am/pQi4iIiJQCPXv25KuvvuLixYtcuHCBL7/8kp49ewJw8OBBVq5cCcDUqVOJjY3l/PnznDlzhsGDB/P666+zadMmAAYMGMC///3vK+fduHFjvm3+9Kc/5dVXX+XMmTNX7jgPHDiQf//731hrAdiwYQMAN910E1OnTgVg69atbN68+Ufnc0emHPnl2LdvH40aNeKJJ55g2LBheeYoLBXUIiIiIqVAhw4deOCBB+jcuTNdunTh4Ycfpn379gA0b96c//znP7Rs2ZJTp04xfvx4zp07x9ChQ2nbti2xsbG89tprALzxxhusXbuWtm3b0qpVqyt3nvNy55138sknnzBy5Mgr21544QXS09Np27YtrVu35oUXXgBg/PjxnD9/npYtW/Liiy/SsWPHH53PHZlulGP69Om0adOG6Ohotm7dyn333VfAdzh/JqdqL4liYmKsu+cvFBERESmKHTt20LJlS6djiBvkdS2NMeustXnOyac71D4iM6v4I0xFRERExPtUUPuA5IvJ1Hy1Jm/98JbTUURERESkkFRQ+4AtJ7Zw5vIZnpr3FKsSVzkdR0REREQKQQW1D9iTnL1CT7XgaoycMZKTF086nEhERERECkoFtQ/Yk7KHIP8gvrnrG45fOM49X9xDls1yOpaIiIiIFIAKah8QnxJPo+qN6FyvM28MeoNv937LK0tfcTqWiIiIiBSACmofsCdlD01rZC97+UjHR7g76m5eXPwicfviHE4mIiIipdnDDz981ZLeefnqq69uuI+79O7dm5wpkQcPHszp06cLnOvFF18kLs6Z2kkFtcOybBZ7U/bSpEYTIHsp0AlDJ9AypCV3fX4Xh88edjihiIiIlFbvvvsurVq1uu4+xS2oMzIyinTcnDlzqFatWr7PX5vrD3/4A/379y9SW8XlsYLaGDPZGHPCGLP1mu0/M8bsNMZsM8a8mmv7b4wx8caYXcaYgZ7K5WuOnDtCakbqlTvUAJWCKvHZiM+4mH6RUZ+PIj0z3cGEIiIiUhLs37+fFi1acPfdd9OyZUvuvPNOLl68CMDChQtp3749UVFRjBkzhsuXLwNX3xGuVKkSzz//PO3ataNr164cP36cFStWMGvWLJ555hmio6PZu3fvVW0+8MADjBs3jpiYGJo1a8Y333wDwJQpU7jtttvo27cv/fr148KFC4wZM4bOnTvTvn17Zs6cCUBqaiqjRo2iZcuW/OQnPyE1NfXKuSMjIzl5Mnuihg8//JC2bdvSrl077r333jxzPfDAA3z22WfXfb2RkZH87ne/o0OHDkRFRbFz5063vPcBbjlL3qYAbwIf5mwwxvQBhgHtrLWXjTG1XdtbAaOA1kBdIM4Y08xaW+pXO8mZ4aNpzaZXbW8Z0pJJt07iri/u4rmFz/G3AX9zIp6IiIgUwbyn5nFs4zG3njMsOoxB/xx03X127drFe++9R48ePRgzZgxvvfUW//d//8cDDzzAwoULadasGffddx9vv/02Tz311FXHXrhwga5du/KnP/2JZ599lkmTJvHb3/6W2267jaFDh3LnnXfm2eb+/ftZs2YNe/fupU+fPsTHxwOwfv16Nm/eTI0aNXjuuefo27cvkydP5vTp03Tu3Jn+/fszceJEKlSowI4dO9i8eTMdOnT40fm3bdvGyy+/zIoVK6hVqxYpKSnUqFEj31yXLl267uutVasW69ev56233uLvf/877777bgGvQP48dofaWrsESLlm83jgL9bay659Tri2DwM+sdZettYmAPFAZ09l8yXxKdn/6XK6fOQ2Omo0j8U8xt9X/p0vd3zp7WgiIiJSwkRERNCjRw8A7rnnHpYtW8auXbto2LAhzZo1A+D+++9nyZIlPzo2KCiIoUOHAtCxY0f2799foDZHjhyJn58fTZs2pVGjRlfu+t58883UqFEDgPnz5/OXv/yF6OhoevfuzaVLlzh48CBLlizhnnvuAaBt27a0bdv2R+dftGgRI0aMoFatWgBXzpmfG73e22+/vdCv8UY8eYc6L82AnsaYPwGXgF9aa38A6gG5VzRJdG0r9XKmzIuoEpHn868NfI01R9bwwMwHaBvalsY1Gns5oYiIiBTWje4ke4ox5rqfX09gYOCV/f39/Qvc9zm/NitWrHhlm7WWzz//nObNmxc4j6eUK1cOKNxrvBFvD0oMAGoAXYFngOmmMFcaMMY8YoxZa4xZm5SU5ImMXpUzZZ6/n3+ez5cLKMeMETPwN/7cOeNOUtNT89xPRERE5ODBg6xcuRKAqVOnEhsbS/Pmzdm/f/+VrhgfffQRvXr1KvA5K1euzLlz5/J9fsaMGWRlZbF371727duXZ9E8cOBA/v3vf2OtBWDDhg0A3HTTTUydOhWArVu3snnz5h8d27dvX2bMmEFycjIAKSkp181V3NdbFN4uqBOBL2y2NUAWUAs4DOS+RRvu2vYj1tp3rLUx1tqYkJAQjwf2tNxT5uUnslokH/3kIzYe28iT8570UjIREREpaZo3b85//vMfWrZsyalTpxg/fjzBwcG8//77jBgxgqioKPz8/Bg3blyBzzlq1Cj+9re/0b59+x8NSgSoX78+nTt35pZbbmHChAkEBwf/aJ8XXniB9PR02rZtS+vWrXnhhRcAGD9+POfPn6dly5a8+OKLdOzY8UfHtm7dmueff55evXrRrl07fvGLX1w3V3Ffb1GYnN8UPHJyYyKBb6y1bVyfjwPqWmtfNMY0AxYC9YFWwFSy+03XdW1veqNBiTExMTZnZGpJlGWzqPhKRR6LeYx/DPzHDfd/buFz/HnZn5kybAr3R9/vhYQiIiJSUDt27KBly5aOtb9//36GDh3K1q1bb7yzmzzwwAPXHbBYUuV1LY0x66y1MXnt78lp86YBK4HmxphEY8xDwGSgkWsqvU+A+113q7cB04HtwDzg8bIww8eRc0e4lHEpzwGJeflDnz/QO7I342ePZ8vxLR5OJyIiIiIF4bFBidba0fk8dU8++/8J+JOn8vii/KbMy0+AXwDT7phG+4ntuXPGnawdu5bK5Sp7MqKIiIiUEJGRkV69Ow3Z802LVkp01PWmzMtPWKUwPrnjE+JT4nn464fxZJcdEREREbkxFdQOutGUefnpFdmLV/q+wvRt03lzzZseSiciIiKFpRtdJV9RrqEKagftSdlD4+qN850y73qe6fEMtza7lafnP83qxNUeSCciIiKFERwcTHJysorqEsxaS3Jycp4zlVyPtxd2kVziU+IL1d0jNz/jxwfDP6DDOx0YMWMEGx7dQM0KNd2cUERERAoqPDycxMRESsM6GWVZcHAw4eHhhTpGBbVDsmwW8SnxDGg0oMjnqF6+OjNGzKDH5B7c8+U9zL5rNn5
Gf3QQERFxQmBgIA0bNnQ6hjhA1ZdDCjtlXn5i6sbwr0H/Yl78PF5Z+oqb0omIiIhIQamgdkhhp8y7nkc7PspdUXfxu+9+x8J9C4t9PhEREREpOBXUDtmT4iqob7DseEEYY5g4dCLNazbnri/u4vDZPFdtFxEREREPUEHtkPiUeIL8gwivUrhO7/mpFFSJz0d+zoW0C4z6fBTpmeluOa+IiIiIXJ8KaocUZ8q8/LQMack7t77DsoPLeH7R8247r4iIiIjkTwW1Q4ozZd713BV1F+NjxvO3FX/jq51fuf38IiIiInI1FdQOyJkyzx39p/Py+sDXiakbwwNfPcCB0wc80oaIiIiIZFNB7YDDZw9zKeOSW2b4yEu5gHJ8cscnnLl8hmlbp3mkDRERERHJpoLaAfEp8QAe6fKRo3GNxrSs1ZKlB5d6rA0RERERUUHtCHdOmXc9Pev3ZPnB5WTZLI+2IyIiIlKWqaB2gLunzMtPbP1Yzlw+w7YT2zzajoiIiEhZpoLaAZ6YMi8vsfVjAdTtQ0RERMSDVFA7YE/yHo8NSMwtslok9SrXY9nBZR5vS0RERKSsUkHtZVk2i72n9tKkuucGJOYwxhBbP1YFtYiIiIgHqaD2Mk9PmXet2PqxHDp7SPNRi4iIiHiICmov88aUebnl9KPWXWoRERERz1BB7WXemjIvR1TtKKqUq6KCWkRERMRDVFB72Z7kPZTzL0dE1QivtOfv50/3iO6a6UNERETEQ1RQe1n8qXgaVW+En/HeWx8bEcu2pG2kpKZ4rU0RERGRskIFtZd5a8q83Ho26AnAikMrvNquiIiISFmggtqLvDllXm6d6nYi0C+QpQfU7UNERETE3VRQe5G3p8zLUT6wPDF1Y1h2SAMTRURERNxNBbUX5UyZ560ZPnLrWb8nPxz+gdT0VK+3LSIiIlKaqaD2opwp87w1B3VusfVjSc9K54cjP3i9bREREZHSTAW1F3l7yrzcukd0B7TAi4iIiIi7qaD2IiemzMtRs0JNWoe0VkEtIiIi4mYqqL3IiSnzcoutH8uKQyvIzMp0LIOIiIhIaaOC2ktypsxzYkBijtj6sZy5fIatJ7Y6lkFERESktFFB7SU5U+Y5MSAxR8/62Qu8qNuHiIiIiPuooPaSnBk+nLxDXb9qfcKrhGs+ahERERE3UkHtJTlzUDt5h9oYQ2z9WJYeWIq11rEcIiIiIqWJCmovcXLKvNxiI2I5fO4wB84ccDSHiIiISGmhgtpL4k/F07hGY0emzMutZwP1oxYRERFxJxXUXrIneY+j3T1ytA5pTdVyVVl6YKnTUURERERKBRXUXuALU+bl8Pfzp3tEdw1MFBEREXETFdRe4AtT5uXWs35PtidtJ/listNRREREREo8FdRe4AtT5uUWWz8WgOWHljucRERERKTkU0HtBTlT5jm57Hhunep1Isg/SAMTRURERNxABbUX5EyZF14l3OkoAAQHBNOpbicV1CIiIiJuoILaC/ak7PGJKfNyi60fy9oja0lNT3U6ioiIiEiJ5jsVXikWnxLvMwMSc8TWjyU9K501h9c4HUVERESkRFNB7WG+NGVebj0iegBa4EVERESkuDxWUBtjJhtjThhjtubx3NPGGGuMqeX63Bhj3jDGxBtjNhtjOngql7flTJnnawV19fLVaVO7DUsPaoEXERERkeLw5B3qKcCgazcaYyKAAcDBXJtvAZq6Ho8Ab3swl1flTJnna10+AGIjYllxaAWZWZlORxEREREpsTxWUFtrlwApeTz1OvAsYHNtGwZ8aLOtAqoZY+p4Kps37Ul2zUHtI1Pm5dazQU/OpZ1jy4ktTkcRERERKbG82ofaGDMMOGyt3XTNU/WAQ7k+T3RtK/HiU+J9asq83HIWeFE/ahEREZGi81pBbYypADwHvFjM8zxijFlrjFmblJTknnAe5ItT5uWoX7U+EVUi1I9aREREpBi8WeU1BhoCm4wx+4FwYL0xJgw4DETk2jfcte1HrLXvWGtjrLUxISEhHo5cfPEp8T43IDG32PqxLDu4DGvtjXcWERERkR/xWkFtrd1ira1trY201kaS3a2jg7X2GDALuM8120dX4Iy19qi3snlKzpR5vjggMUfP+j05cu4I+0/vdzqKiIiISInkyWnzpgErgebGmERjzEPX2X0OsA+IByYBj3kqlzclnk30ySnzcsvpR61uHyIiIiJFE+CpE1trR9/g+chcH1vgcU9lcUp8Sjzgm1Pm5WhduzXVgqux7OAy7mt3n9NxREREREoc3xspV4r48pR5OfyMHz0iemimDxEREZEiUkHtQb48ZV5usfVj2XFyBycvnnQ6ioiIiEiJo4Lag3x5yrzccvpRLz+43OEkIiIiIiWPb1d6JdyelD0+PSAxR6e6nSjnX07dPkRERESKQAW1h2TZLPam+PaUeTnKBZSjU71OmulDREREpAhUUHtI4tlELmdeLhF3qAFiI2JZd3QdF9MvOh1FREREpERRQe0hOVPm+fIMH7n1bNCTjKwM1hxe43QUERERkRJFBbWH5EyZVxK6fAB0C++GwbD0gLp9iIiIiBSGCmoP2ZOyp0RMmZejevnqtKndhmWHNDBRREREpDBUUHtIfEp8iZgyL7fY+rGsOLSCjKwMp6OIiIiIlBglp9orYUrKlHm59azfk/Np59l8fLPTUURERERKDBXUHpAzZV5JK6hzFnjRfNQiIiIiBaeC2gNypswrKQMSc0RUjaB+1foqqEVEREQKQQW1B+TM8FFSpszLrWf9niw9uBRrrdNRREREREoEFdQekDMHdUm7Qw3Z3T6OnT/GvlP7nI4iIiIiUiKooPaAPSl7CA4ILjFT5uWmftQiIiIihaOC2gPiU+JpXL1kTZmXo1VIK6oHV1dBLSIiIlJAJa/iKwH2pOwpkd09APyMHz3q92DpQa2YKCIiIlIQKqjdrKROmZdbbEQsu5J3kXQhyekoIiIiIj5PBbWbldQp83Lr2aAnAMsPLXc4iYiIiIjvU0HtZiV5yrwcHet0pJx/OZYeULcPERERkRtRQe1mOVPmleQuH+UCytG5XmeWHdLARBEREZEbUUHtZjlT5tWrUs/pKMXSs35P1h9dz4W0C05HEREREfFpKqjdbE/KnhI7ZV5usfVjycjKYPXh1U5HEREREfFpJbvq80HxKfElekBijm4R3TAYzUctIiIicgMqqN2oNEyZl6NacDWiQqNUUIuIiIjcgApqN8qZMq8kz/CRW8/6PVmZuJKMrAyno4iIiIj4LBXUbpQzZV5p6PIB2f2oz6edZ9OxTU5HEREREfFZKqjdaE+Kaw7qUtDlA7ILakDdPkRERESuQwW1G8WnxJeKKfNyhFcJJ7JaJEsPaoEXERERkfyooHaj0jJlXm6x9WNZdnAZ1lqno4hcV0ZWBhPWTuBSxiWno4iISBlTeio/HxCfEl9qBiTmiI2I5fiF4+w9tdfpKCLX9c3ubxg/ezxf7fzK6SgiIlLGqKB2k5wp85pULx0DEnP0bNATgKUH1O1DfNuihEUAbDm+xeEkIiJS1qigdp
NDZw6VqinzcrSo1YIa5WtoYKL4vIUJCwHYckIFtYiIeJcKajeJT4kHSs+UeTn8jB89Inqw7JAKavFdx84fY3vSdvyMnwpqERHxOhXUblLapszLrWf9nuxO3s2JCyecjiKSp8UJiwG4rflt7D+9n3OXzzmcSEREyhIV1G5S2qbMy03zUYuvW5iwkGrB1biv7X0AbEva5nAiEREpS1RQu0lpnDIvR8e6HQkOCFZBLT5rUcIi+kT2oV1YO0ADE0VExLtKX/XnkD3Je0rdgMQcQf5BdKnXRQW1+KSEUwkknE6gb8O+RFaLpGJgRfWjFhERr1JB7QaZWZnsPbW3VPafzhFbP5b1R9dzPu2801FErpIzXV7fhn3xM360qd1GBbWIiHiVCmo3SDybSFpmWqmb4SO32PqxZNpMVieudjqKyFUW7V9EWKUwWtZqCUBU7Si2HN+i1T1FRMRrVFC7Qc6UeaX5DnW38G4YjLp9iE+x1rIoYRF9G/bFGANAVGgUyanJHDt/zOF0IiJSVqigdoOcKfNK8x3qqsFVaRfWjqUHtWKi+I4dJ3dw7Pwx+jXsd2Vbm9ptAC3wIiIi3qOC2g32JO8ptVPm5RYbEcuqxFWkZ6Y7HUUEuLr/dI6o2lGAZvoQERHvUUHtBvGn4mlSo0mpnDIvt9j6sVxIv8Cm45ucjiICZM8/3bBaQyKrRV7ZFlIxhNCKobpDLSIiXlO6K0Av2ZO8p1R398jRo34PAFYlrnI4iUj27Drf7f/uqu4eOaJCo1RQi4iI16igLqayMGVejnqV61GrQi02HtvodBQRNhzbwOlLp6/q7pEjqnYU25O2k5mV6UAyEREpazxWUBtjJhtjThhjtuba9jdjzE5jzGZjzJfGmGq5nvuNMSbeGLPLGDPQU7ncrSxMmZfDGEN0WLQKavEJOf2n+zTs86PnompHcSnjEntP7fV2LBERKYM8eYd6CjDomm0LgDbW2rbAbuA3AMaYVsAooLXrmLeMMf4ezOY2OTN8lIU71ADRodFsPbFVAxPFcYsSFtE6pDVhlcJ+9FxUqAYmioiI93isoLbWLgFSrtk231qb4fp0FRDu+ngY8Im19rK1NgGIBzp7Kps7XZmDupQuO36t6LBoLmdeZlfyLqejSBmWlpnG0oNL8+zuAdAqpBUGo37UIiLiFU72oR4DzHV9XA84lOu5RNc2n5czZV7dynWdjuIV0WHRAOr2IY5anbiai+kX8y2oKwRWoEmNJiqoRUTEKxwpqI0xzwMZwH+LcOwjxpi1xpi1SUlJ7g9XSGVlyrwczWs1JzggmA1HNzgdRcqwRQmL8DN+9I7sne8+UaFR6vIhIiJe4fUq0BjzADAUuNtaa12bDwMRuXYLd237EWvtO9baGGttTEhIiEezFkRZmTIvR4BfAFG1o9h4fKPTUaQMW5iwkA51OlAtuFq++0TVjiI+JZ6L6Re9F0xERMokrxbUxphBwLPAbdba3D/lZgGjjDHljDENgabAGm9mK4qyNGVebjkzffzv9yHxFZcyLvHaytc4e/ms01E85kLaBVYlrqJvZN7dPXJE1Y7CYtmetN1LyUREpKzy5LR504CVQHNjTKIx5iHgTaAysMAYs9EYMwHAWrsNmA5sB+YBj1trfX4C2Zwp88piQZ2SmkLi2USno8g1Xlj0Ak/Pf5rJGyY7HcVjlh1cRnpWOv0a/XhBl9w004eIiHhLgKdObK0dncfm966z/5+AP3kqjyfkTJlXlrp8wNUDEyOqRlx/Z/GalYdW8o+V/wDgq51f8VTXp5wN5CGLEhYR6BdIj4ge192vcfXGBAcEa2CiiIh4XNkYSechZW3KvBxtQ9tiMGw4poGJviI1PZUHZj5ARNUInuj8BEsPLiX5YrLTsTxi0f5FdIvoRsWgitfdz9/Pn1YhrVRQi4iIx6mgLoayNmVejkpBlWhas6mmzvMhLyx+gd3Ju3nvtve4r919ZNksvtn9jdOx3O5U6inWHVl3w/7TOaJqa6YPERHxPBXUxVDWpszLTUuQ+47lB5fz2srXGNdxHP0b9adDnQ6EVwln5q6ZTkdzu+8PfI/F5jv/9LWiakdx/MJxki44P8WmiIiUXmWvEnSjPcl7ytyAxBzRodEknE7g9KXTTkcp0y6mX+TBmQ9Sv2p9Xr35VQCMMQxrPox58fNK3ZRxixIWUSGwAl3CuxRo/5yBiVtPbPVkLBERKeNUUBdRzpR5ZW1AYo6cgYmbj292NkgZ99tFv2VPyh4mD5tM5XKVr2wf1nwYqRmpxO2LczCd+y1MWEjP+j0J8g8q0P5RtV0zfagftYiIeJAK6iIqq1Pm5Whfpz2gJcidtOzgMv656p88FvPYj7pA9IrsRdVyVZm5s/R0+zh2/hjbk7YXuLsHQFilMGqWr6l+1CIi4lEqqIuorE6ZlyOsUhihFUM104dDcrp6RFaL5K83//VHzwf5BzG46WC+3v01mVk+P6V7gSxOWAxAv4bXn386N2NM9hLkukMtIiIepIK6iMrqlHm5aWCic55b+BzxKfFMHjaZSkGV8txneIvhJF1MYmXiSi+n84yFCQupFlztSnejgoqqHcXWE1vJslmeCSYiImWeCuoi2pO8h/IB5cvclHm5RYdFs+3ENtIy05yOUqYsPbCUN1a/weOdHqd3ZO989xvUZBBB/kF8tfMrr2XzpEUJi+gd2Rt/P/9CHRdVO4oL6RfYf3q/Z4KJiEiZp4K6iPak7KFxjcZlcsq8HNFh0aRnpbMjaYfTUcqMC2kXeHDmgzSs3pC/9P/LdfetUq4KfRv25audX2Gt9VJCz0g4lUDC6YRCdffIoSXIRUTE08puNVhM8SnxZXZAYo72YRqY6G3PLXyOvaf2Mvm2/Lt65Da8+XD2ntrL9qTtXkjnOYsSFgEUakBijtYhrQHN9CEiIp6jgroIyvqUeTma1GhChcAKGpjoJd/v/5431rzBzzr/jF6RvQp0zK3NbwUo8d0+Fu1fRFilMFrWalnoYyuXq0zDag1VUIuIiMeooC6Csj5lXg5/P3/ahrbVHWovuJB2gTGzxtC4emP+3O/PBT6ubuW6dKnXpUSvmmitZVHCIvo27IsxpkjnaFO7jbp8iIiIx6igLoKcKfPK8gwfOaJDs2f6KOl9dH3dr+N+TcKpBN4f9j4VgyoW6thhzYfxw5EfOHz2sIfSedaOkzs4dv4YfSML390jR1TtKHYn7+ZyxmU3JhMREcmmgroI9iSX7Tmoc4sOi+bM5TMcOHPA6Sil1nf7v+PNH97kiS5P0LNBz0IfP7zFcABm7Zrl5mTekdN/ul+jwg9IzBEVGkWmzWTnyZ3uiiUiInKFCuoiiE+JL/NT5uXImRNY3T4843zaeR6c+SBNajThlX6vFOkcLWq1oFnNZny16yv3hvOShQkLaVitIZHVIot8Di1BLiIinqSCugg0Zd7/RIVG4Wf8VFB7yK8W/IoDpw/w/rD3qRBYoUjnMMYwrPkwFics5sylM25O6FmZWZl8t/+7Is3ukVuzms0I9AtUP2oREfEIVYRFoCnz/qdCYAWa12yumT48YFHCI
t5a+xZPdX2K2PqxxTrX8BbDSc9KZ278XDel844NxzZw+tLpIs0/nVugfyAtQ1rqDrWIiHiECupCypkyTwX1/2gJcvc7d/kcD816iKY1mvJy35eLfb4u9bpQu2LtEjd9Xk7/6T4N+xT7XFG1o1RQi4iIR6igLqRDZw+RlpmmAYm5RIdFc/DMQVJSU5yOUmo8u+DZYnf1yM3fz5/bmt3GnD1zStRMF4sSFtEqpBVhlcKKfa6o2lEknk3kVOopNyQTERH5HxXUhRRWKYylDy5laLOhTkfxGTkDEzcd2+RskFJi4b6FTFg3gZ93/Tk96vdw23mHtxjOubRzfLf/O7ed05PSMtNYenBpsbt75MhZgnzria1uOZ+IiEgOFdSFFBwQTGz9WOpUruN0FJ/hazN9HDl3hIjXI3jk60c4ceGE03EKJaerR7OazdzS1SO3fo36UTGwYolZ5GV14moupl8s9oDEHJrpQ0REPEUFtRRb7Yq1qVu5rs8MTJyzZw6JZxOZvGEyTf/dlL+v+HuJ6ebwzIJnOHT2EFOGTaF8YHm3njs4IJhBTQYxc9dMsmyWW8/tCYsSFuFn/OjVoGDLrN9IeJVwqparqpk+RETE7VRQi1v40sDEuH1x1K1cl22PbaNn/Z48s+AZWr/Vmpk7Z/r0io4L9i5g4rqJ/KLrL+gW0c0jbQxrPowj546w9shaj5zfnRYmLKRDnQ5UL1/dLeczxhAVqoGJIiLifiqoxS2iQ6PZcXIHlzIuOZojy2axMGEh/Rv1p3mt5nxz1zfMu3seQf5BDP90ODd/dLNP3qE8e/ksD816iOY1m/OHPn/wWDtDmg3B3/gzc6dvd/u4kHaBVYmrirXceF6iakex9cRWn/7FSkRESh4V1OIW0WHRZGRlsD1pu6M5Nh/fzMmLJ+nfsP+VbQObDGTTuE38+5Z/s/7oeqInRvPY7Mc4efGkg0mv9sv5v+TwucNMGe7+rh651Shfg5sa3OTzqyYuP7Sc9Kx0t/WfztGmdhvOXD5D4tlEt55XRETKNhXU4hbt67QHnB+YGLcvDsgegJdboH8g/9f5/4h/Ip7HOz3OO+veoem/m/LPVf8kPTPdiahXzN87n0nrJ/HLbr+ka3hXj7c3vMVwtidtZ0/yHo+3VVQL9y0k0C+w2AvaXEsDE0VExBNUUItbNKreiEpBldhw1NmBiXH74mgV0oq6levm+XyN8jV445Y32Dx+M13qdeHn3/6cqLejmLNnjpeTZjtz6QwPz3qYlrVa8lKfl7zS5rDmwwB8eraPRfsX0TW8KxWDKrr1vG1qtwHwyW4/IiJScqmgFrfwM360C23HxuMbHctwOeMySw4suaq7R35ahbRi7t1z+Wb0N1gsQ6YO4Zb/3sKOpB1eSJotMyuTX3z7iytdPYIDgr3SboNqDYgOi/bZVRNPpZ5i3ZF1bpt/Orfq5asTXiVcd6hFRMStVFCL20SHRbPp2CbHpmRbmbiS1IxU+je6cUEN2bM+DGk2hC3jt/DagNdYeWglUW9H8cTcJ9y+6mNmVibbTmzjw00f8sTcJ+gxuQdV/lKFyRsn80z3Z+hcr7Nb27uR4c2Hs+LQCp+cp/v7A99jsW7vP51DS5CLiIi7qaAWt4kOi+Zc2jkSTiU40n7cvjj8jT+9Igs3b3GQfxA/7/Zz9vxsD2M7jOU/P/yHpv9uyptr3iQjK6PQOXIXz0/OffJK8dzm7Tbc/9X9vLfhPQyGsR3GMvX2qW5fwKUghrUYhsXy9a6vvd72jSxKWESFwAp0Ce/ikfNH1Y5iR9IOx/vOi4hI6RHgdAApPdqH/W9gYuMajb3efty+OLqEd6FKuSpFOj6kYghvD32b8Z3G8/Nvf87P5v6Mt9e+zesDX2dA4wF5HpOZlcnOkztZd3Qd646sY93RdWw4toGL6RcBqBBYgfZh7RnbYSwd63SkY92ONK/ZHH8//yK/TndoF9qOBlUbMHPXTB7q8JCjWa61MGEhPev3JMg/yCPnjwqNIj0rnd3Ju2ldu7VH2hARkbJFBbW4TevarfE3/mw8tpE7Wt3h1bZPXzrND0d+4Lc9f1vsc7UNbUvcvXHM3DWTX87/JQM/HsjQZkN5tf+rZNmsGxbPD7d/mI51OxJTN8Yniue8GGMY1nwY76x/hwtpF9w++K+ojp0/xvak7dzf7n6PtZF7pg8V1CIi4g4qqMVtggOCaRnS0pElyL/b/x1ZNqvA/advxBjD8BbDuaXJLfxr9b94ecnLtHqr1ZXnry2eO9bpSItaLXyyeM7P8BbDeWPNG8zfO5+ftPyJ03EAWJywGMBj/aeB7Otk/NlyfAuj2ozyWDsiIlJ2qKAWt4oOi75SFHlT3L44KgZWdHu/23IB5Xi2x7Pc3+5+pmycQp3KdUpk8ZyXng16Uj24Ol/t+spnCuqFCQupFlztSvchTygXUI7mtZprYKKIiLiNCmpxq+jQaD7e/DFJF5IIqRjitXbj9sXRK7KXx/rdhlYK5Vexv/LIuZ0S4BfA0GZD+Wb3N2RkZRDg5/y3g0UJi+gd2dvjv6xE1Y5izeE1Hm1DRETKDs3yIW4VHRYNwKbjm7zW5qEzh9iVvKtA80/L1Ya3GE5KagrLDi5zOgoJpxJIOJ1A30jPdffIEVU7ioTTCZy7fM7jbYmISOmnglrcKqeg9uYS5AsTFgK4rf90WTKg8QDK+ZfziUVeFiUsAn68bLwnRIVmD0zclrTN422JiEjpp4Ja3KpmhZpEVInw6sDEuH1x1K5Y+8qy0lJwlYIqcXPjm5m5aybWWkezLNq/iLBKYbSs1dLjbWkJchERcScV1OJ20WHRXrtDba0lbl8c/Rv1xxjjlTZLm+HNh7P/9H42H9/sWAZrLYsSFtG3YV+vXMfIapFUDKyogYkiIuIWKqjF7aLDotl5ciep6akeb2vria0cv3Bc/aeLYWizoRiMo90+dpzcwbHzx7zSfxrAz/jRpnYbFdQiIuIWKqjF7aLDosmyWWw9sdXjbcXtiwO80++2tAqtFEr3iO7M3DXTsQw5/ac9Of/0taJqR7Hl+BbHu7qIiEjJp4Ja3C73EuSeFpcQR7Oazahftb7H2yrNhjUfxoZjGzhw+oAj7S9MWEjDag1pWL2h19qMCo0iOTWZY+ePea1NEREpnVRQi9tFVoukSrkqHi+o0zLT+H7/9+ru4QbDWwwHYNauWV5vOzMrk+/2f+fVu9Nw9RLkIiIixXHDgtoY08MYU9H18T3GmNeMMQ08H01KKmMM0WHRHp/pY3Xiai6kX9B0eW7QtGZTWtZqyVe7vvJ62xuPbeT0pdPeL6hdU+dppg8RESmugtyhfhu4aIxpBzwN7AU+9GgqKfGiQ6PZfHwzmVmZHmsjbl8cfsaP3pG9PdZGWTK8xXC+3/89p1JPebXdnHnEvV1Q16pQi7BKYbpDLSIixVaQgjrDZo/aGQa8aa39D1DZs7GkpIsOi+ZC+gX2ntrrsTbiEuKIqRtD9fLVPdZGWTK8xXAybSaz98z2aruLEhbRKqQVYZXCvNouZHf78MbgWRERKd0KUlCfM8b8BrgHmG2M
8QMCb3SQMWayMeaEMWZrrm01jDELjDF7XP9Wd203xpg3jDHxxpjNxpgORX1B4hva1/HswMSzl8+yOnG1+k+7UUzdGOpUquPV6fPSMtNYenCp16bLu1ZU7Si2JW3z6F9SRESk9CtIQf1T4DLwkLX2GBAO/K0Ax00BBl2z7dfAQmttU2Ch63OAW4CmrscjZHczkRKsVUgrAv0CPVZQf7//ezJtpvpPu5Gf8WNY82HMi5/HpYxLXmlzdeJqLqZfdGzaw6jQKC5lXPLoX1JERKT0K0hB/XNr7WvW2qUA1tqDQOsbHWStXQKkXLN5GPCB6+MPgOG5tn9os60Cqhlj6hQgm/ioIP8gWoW08tjAxLh9cZQPKE+3iG4eOX9ZNazFMC6kX2DhvoVeaW9RwiIMhl4NenmlvWtdmelDAxNFRKQYClJQ35zHtluK2F6otfao6+NjQKjr43rAoVz7Jbq2SQnmySXI4xLi6NmgJ8EBwR45f1nVJ7IPlYMqe22Rl4UJC+lQp4Nj/eBbhrTEYDQwUUREiiXfgtoYM94YswVo7urXnPNIADYXt2HXQMdCL1FmjHnEGLPWGLM2KSmpuDHEg6LDojl2/pjbF844cu4I25O2q/+0B5QLKMctTW9h1q5ZZNksj7Z1Ie0CqxJX0a+hc6tcVgisQJMaTVRQi4hIsVzvDvVU4FZgluvfnEdHa+09RWzveE5XDte/J1zbDwMRufYLd237EWvtO9baGGttTEhISBFjiDfkrJi46dgmt543pzuC+k97xvDmwzl+4TirE1d7tJ3lh5aTnpXu9enyrhUVGqUuH15y5tIZfv/d7zlx4cSNdxYRKUHyLaittWestfuttaOBavyvoI7I75gCmAXc7/r4fmBmru33uWb76AqcydU1REqodmHtAPfP9BGXEEfN8jWvnF/ca3DTwQT6BXp8to+F+xYS6BdIbP1Yj7ZzI1G1o4hPiedi+kVHc5QF/17zb176/iUGfTyIM5fOOB1HRMRtCrJS4hPAf4HarsfHxpifFeC4acBKsruMJBpjHgL+AtxsjNkD9Hd9DjAH2AfEA5OAx4rwWsTHVAuuRmS1SDYe3+i2c1pridsXR79G/fAzBRkCIIVVNbgqvSN7e3zVxEX7F9E1vCsVgyp6tJ0biaodhcWyPWm7ozlKu8ysTCatn3Sli81tn9xGanqq07FERNyiIBXJw0AXa+2L1toXga7A2BsdZK0dba2tY60NtNaGW2vfs9YmW2v7WWubWmv7W2tTXPtaa+3j1trG1tooa+3a4r0s8RXRYdFsOOq+mT52ntzJkXNH1H/aw4a3GM7u5N3sPLnTI+ffemIr64+ud7y7B2gJcm+ZFz+Pg2cO8krfV/joJx+x9MBSfvrZT8nIynA6mohIsRWkoDZA7lUPMl3bRG4oOjSa3cm7uZB2wS3ni9sXB6j/tKfd1vw2ALd2+7DWsuTAEm6dditRb0cRHBDMiFYj3Hb+ompcvTHlA8prYKKHTVg3gdCKoQxrMYxRbUbxn8H/4evdX/PQrIc8PgBWRMTTClJQvw+sNsb83hjzErAKeM+zsaS0aF+nPRbrtmIlLiGORtUb0bB6Q7ecT/IWXiWcmLoxbpk+LyMrg+nbptPl3S70mtKL1Ymrean3S+x/cj+ta99wSnuP8/fzp1VIKy1B7kEHzxxkzp45PNT+IYL8gwAY32k8f+j9Bz7c9CFPf/s02RM/iYiUTAE32sFa+5ox5jsgluxp7h601npmtQ4pdaLDooHsgYldw7sW61wZWRksTljM6Daj3ZBMbmRY82G8sPgFjp47Sp3KhV9n6XzaeSZvmMzrq15n/+n9NK3RlAlDJnBfu/soH1jeA4mLLio0innx85yOUWq9u/5drLWM7Xh1b8Hf3vRbklOT+efqf1KzQk1+e9NvHUooIlI8hRnVZa75V+SGIqpEUD24ultm+vjh8A+cSzun7h5eMrzFcABm7ZpVqOOOnjvKcwufI+L1CJ6c9yT1Ktfjq59+xc7/28mjMY/6XDEN2QMTj50/xsmLJ52OUuqkZ6bz7vp3GdRkEJHVIq96zhjDawNf49629/LC4hd4+4e3nQkpIlJMBZnl40WylwmvDtQC3jfG6DaCFIgxJntgohuWII/bF4fB0KdhHzckkxtpHdKaxtUbF7jbx/ak7Tw08yEi/xXJX5b9hX4N+7FizAqWjVnGsBbDfHpWFi1B7jnf7P6Go+ePMi5mXJ7P+xk/3rvtPW5tdiuPz3mcT7Z+4uWEIiLFV5CfcHcDnay1v7fW/o7sWT7u9WwsKU2iw6LZfHxzsUfzxyXE0b5Oe2pVqOWmZHI9xhiGNR/GwoSFnLt8Ls99rLUsTljMkKlDaP1Wa6ZtncbYDmPZ/bPdfDbyM7pFdPNy6qK5MtOHBia63YR1E6hXuR6Dmw7Od59A/0A+vfNTejboyb1f3qvuNyJS4hSkoD4CBOf6vBz5rGIokpfosGguZVxiT/KeIp/jfNp5Vh5aqenyvGx4i+GkZab9qMDJyMpg2pZpxEyKoe+Hffnh8A/8ofcfOPjzg7w5+E2a1GjiUOKiCa0YSq0KtXSH2s32puxl/t75jO0wlgC/6w/ZKR9YnlmjZhFVO4rbP72dFYdWeCmliEjxFaSgPgNsM8ZMMca8D2wFThtj3jDGvOHZeFIa5CxBXpx+1EsPLCU9K139p72se0R3alWodWWRl3OXz/HPVf+k8RuNueuLu7iQdoF3hr7DgacO8EKvF0rsXw+MMbSp3UZ3qN1s0vpJ+Bt/Hu7wcIH2rxpclXn3zCO8SjhDpg5h8/HNHk4oIuIeBSmovwSeAxYD3wHPk71k+DrXQ+S6WtRqQZB/ULEK6rh9cZTzL+f4MtVljb+fP7c2u5XZu2fz67hfE/F6BD//9uc0qNqAmaNmsv3x7YztONYnBxoWVlTtKLae2Ko5kd3kcsZlJm+YzNBmQ6lXpV6Bj6tdsTYL7l1AxcCKDPx4IHtT9nowpYiIe9ywoLbWfpDzAGYBG67ZJnJdgf6BtKndplhLkMclxNGjfo9SUbiVNMNbDOfM5TP8bcXfGNB4AKseWsWSB5dwW/PbfHqgYWFF1Y7iQvoF9p/e73SUUuHLnV+SdDEp38GI19OgWgPm3zuftMw0Bnw8gKPnjnogoYiI+xRklo/vjDFVjDE1gPXAJGPMa56PJqVJdGj2EuRFWbzh+PnjbD6+Wf2nHTKk6RDeH/Y+u/9vN9NHTKdLeBenI3mEliB3r4nrJhJZLZIBjQcU6fhWIa2Ye/dcjp8/zsCPB3Iq9ZSbE4qIuE9Bbi9VtdaeBW4HPrTWdgFU2UihRIdFk3QxiaPnC3+naVHCIkDLjTvF38+fB6IfoHGNxk5H8ajWIdmrNqofdfHtPLmT7/Z/xyMdHinWXzE61+vMV6O+YlfyLoZOG8qFtAtuTCki4j4F+U4XYIypA4wEvvFwHiml2tcp+sDEuH1xVAuuRoc6HdycSuR/KperTMNqDbUEuRtMXDuRAL8AxrQfU+xz9W/Un6m3T2VV4irunHEnaZlpbkgoIuJeBSmo/wB8C8Rba38
[base64-encoded PNG output elided: rendered plot of the last 24 observed sunspot values (green), the purple point forecast, and grey lower/upper confidence bounds, titled "Forecasted amount of sunspots for the next semester"]\n", - "text/plain": [ - "<Figure size 864x576 with 1 Axes>
" - ] - }, - "metadata": { - "needs_background": "light" - }, - "output_type": "display_data" - } - ], - "source": [ - "plt.figure(figsize=(12, 8))\n", - "plt.plot(forecasts['truth'].iloc[-24:], color='green', label='observed series')\n", - "plt.plot([None for _ in range(forecasts.shape[0])] + forecasts.iloc[-1]['prediction'], color='purple', label='point prediction')\n", - "plt.plot([None for _ in range(forecasts.shape[0])] + forecasts.iloc[-1]['lower'], color='grey')\n", - "plt.plot([None for _ in range(forecasts.shape[0])] + forecasts.iloc[-1]['upper'], color='grey')\n", - "plt.xlabel('timestep')\n", - "plt.ylabel('# sunspots')\n", - "plt.title(\"Forecasted amount of sunspots for the next semester\")\n", - "plt.legend()\n", - "plt.show()" - ] - }, - { - "cell_type": "markdown", - "metadata": {}, - "source": [ - "## Conclusion\n", - "\n", - "In this tutorial, we have gone through how you can train a machine learning model with Lightwood to produce forecasts for a univariate time series task.\n", - "\n", - "There are additional parameters to further customize your timeseries settings and/or prediction insights, so be sure to check the rest of the documentation." - ] - } - ], - "metadata": { - "kernelspec": { - "display_name": "mdb", - "language": "python", - "name": "mdb" - }, - "language_info": { - "codemirror_mode": { - "name": "ipython", - "version": 3 - }, - "file_extension": ".py", - "mimetype": "text/x-python", - "name": "python", - "nbconvert_exporter": "python", - "pygments_lexer": "ipython3", - "version": "3.8.6" - } - }, - "nbformat": 4, - "nbformat_minor": 4 -} diff --git a/docs/_sources/tutorials/tutorial_update_models/Tutorial -- Update a predictor.ipynb.txt b/docs/_sources/tutorials/tutorial_update_models/Tutorial -- Update a predictor.ipynb.txt deleted file mode 100644 index fcb2a4397..000000000 --- a/docs/_sources/tutorials/tutorial_update_models/Tutorial -- Update a predictor.ipynb.txt +++ /dev/null @@ -1,703 +0,0 @@ -{ - "cells": [ - { - "cell_type": "markdown", - "metadata": {}, - "source": [ - "# Introduction\n", - "\n", - "In this tutorial, we will go through an example to update a preexisting model. This might be useful when you come across additional data that you would want to consider, without having to train a model from scratch.\n", - "\n", - "The main abstraction that Lightwood offers for this is the `BaseMixer.partial_fit()` method. To call it, you need to pass new training data and a held-out dev subset for internal mixer usage (e.g. early stopping). If you are using an aggregate ensemble, it's likely you will want to do this for every single mixer. 
The convienient `PredictorInterface.adjust()` does this automatically for you.\n", - "\n", - "\n", - "# Initial model training\n", - "\n", - "First, let's train a Lightwood predictor for the `concrete strength` dataset:" - ] - }, - { - "cell_type": "code", - "execution_count": 1, - "metadata": {}, - "outputs": [], - "source": [ - "from lightwood.api.high_level import ProblemDefinition, json_ai_from_problem, predictor_from_json_ai\n", - "import pandas as pd" - ] - }, - { - "cell_type": "code", - "execution_count": 2, - "metadata": {}, - "outputs": [ - { - "name": "stdout", - "output_type": "stream", - "text": [ - "Train dataframe shape: (206, 10)\n", - "Update dataframe shape: (618, 10)\n", - "Test dataframe shape: (206, 10)\n" - ] - } - ], - "source": [ - "# Load data\n", - "df = pd.read_csv('https://raw.githubusercontent.com/mindsdb/lightwood/staging/tests/data/concrete_strength.csv')\n", - "\n", - "df = df.sample(frac=1, random_state=1)\n", - "train_df = df[:int(0.2*len(df))]\n", - "update_df = df[int(0.2*len(df)):int(0.8*len(df))]\n", - "test_df = df[int(0.8*len(df)):]\n", - "\n", - "print(f'Train dataframe shape: {train_df.shape}')\n", - "print(f'Update dataframe shape: {update_df.shape}')\n", - "print(f'Test dataframe shape: {test_df.shape}')" - ] - }, - { - "cell_type": "markdown", - "metadata": {}, - "source": [ - "Note that we have three different data splits.\n", - "\n", - "We will use the `training` split for the initial model training. As you can see, it's only a 20% of the total data we have. The `update` split will be used as training data to adjust/update our model. Finally, the held out `test` set will give us a rough idea of the impact our updating procedure has on the model's predictive capabilities." - ] - }, - { - "cell_type": "code", - "execution_count": 4, - "metadata": { - "scrolled": true - }, - "outputs": [ - { - "name": "stderr", - "output_type": "stream", - "text": [ - "\u001b[32mINFO:lightwood-91181:Dropping features: []\u001b[0m\n", - "\u001b[32mINFO:lightwood-91181:Analyzing a sample of 979\u001b[0m\n", - "\u001b[32mINFO:lightwood-91181:from a total population of 1030, this is equivalent to 95.0% of your data.\u001b[0m\n", - "\u001b[32mINFO:lightwood-91181:Using 15 processes to deduct types.\u001b[0m\n", - "\u001b[32mINFO:lightwood-91181:Starting statistical analysis\u001b[0m\n", - "\u001b[32mINFO:lightwood-91181:Finished statistical analysis\u001b[0m\n", - "\u001b[32mINFO:lightwood-91181:Unable to import black formatter, predictor code might be a bit ugly.\u001b[0m\n", - "\u001b[32mINFO:lightwood-91181:Dropping features: []\u001b[0m\n", - "\u001b[32mINFO:lightwood-91181:Performing statistical analysis on data\u001b[0m\n", - "\u001b[32mINFO:lightwood-91181:Starting statistical analysis\u001b[0m\n", - "\u001b[32mINFO:lightwood-91181:Finished statistical analysis\u001b[0m\n", - "\u001b[32mINFO:lightwood-91181:Cleaning the data\u001b[0m\n", - "\u001b[32mINFO:lightwood-91181:Splitting the data into train/test\u001b[0m\n", - "\u001b[32mINFO:lightwood-91181:Preparing the encoders\u001b[0m\n", - "\u001b[32mINFO:lightwood-91181:Encoder prepping dict length of: 1\u001b[0m\n", - "\u001b[32mINFO:lightwood-91181:Encoder prepping dict length of: 2\u001b[0m\n", - "\u001b[32mINFO:lightwood-91181:Encoder prepping dict length of: 3\u001b[0m\n", - "\u001b[32mINFO:lightwood-91181:Encoder prepping dict length of: 4\u001b[0m\n", - "\u001b[32mINFO:lightwood-91181:Encoder prepping dict length of: 5\u001b[0m\n", - "\u001b[32mINFO:lightwood-91181:Encoder prepping dict length of: 
6\u001b[0m\n", - "\u001b[32mINFO:lightwood-91181:Encoder prepping dict length of: 7\u001b[0m\n", - "\u001b[32mINFO:lightwood-91181:Encoder prepping dict length of: 8\u001b[0m\n", - "\u001b[32mINFO:lightwood-91181:Encoder prepping dict length of: 9\u001b[0m\n", - "\u001b[32mINFO:lightwood-91181:Encoder prepping dict length of: 10\u001b[0m\n", - "\u001b[32mINFO:lightwood-91181:Done running for: concrete_strength\u001b[0m\n", - "\u001b[32mINFO:lightwood-91181:Done running for: id\u001b[0m\n", - "\u001b[32mINFO:lightwood-91181:Done running for: cement\u001b[0m\n", - "\u001b[32mINFO:lightwood-91181:Done running for: slag\u001b[0m\n", - "\u001b[32mINFO:lightwood-91181:Done running for: flyAsh\u001b[0m\n", - "\u001b[32mINFO:lightwood-91181:Done running for: water\u001b[0m\n", - "\u001b[32mINFO:lightwood-91181:Done running for: superPlasticizer\u001b[0m\n", - "\u001b[32mINFO:lightwood-91181:Done running for: coarseAggregate\u001b[0m\n", - "\u001b[32mINFO:lightwood-91181:Done running for: fineAggregate\u001b[0m\n", - "\u001b[32mINFO:lightwood-91181:Done running for: age\u001b[0m\n", - "\u001b[32mINFO:lightwood-91181:Featurizing the data\u001b[0m\n", - "\u001b[32mINFO:lightwood-91181:Training the mixers\u001b[0m\n", - "torch.cuda.amp.GradScaler is enabled, but CUDA is not available. Disabling.\n", - "This overload of addcmul_ is deprecated:\n", - "\taddcmul_(Number value, Tensor tensor1, Tensor tensor2)\n", - "Consider using one of the following signatures instead:\n", - "\taddcmul_(Tensor tensor1, Tensor tensor2, *, Number value) (Triggered internally at ../torch/csrc/utils/python_arg_parser.cpp:1005.)\n", - "\u001b[32mINFO:lightwood-91181:Loss of 7.69654655456543 with learning rate 0.0001\u001b[0m\n", - "\u001b[32mINFO:lightwood-91181:Loss of 6.121406078338623 with learning rate 0.00014\u001b[0m\n", - "\u001b[32mINFO:lightwood-91181:Loss of 5.7169036865234375 with learning rate 0.00019599999999999997\u001b[0m\n", - "\u001b[32mINFO:lightwood-91181:Loss of 4.907417297363281 with learning rate 0.00027439999999999995\u001b[0m\n", - "\u001b[32mINFO:lightwood-91181:Loss of 3.7602126598358154 with learning rate 0.0003841599999999999\u001b[0m\n", - "\u001b[32mINFO:lightwood-91181:Loss of 1.8155415058135986 with learning rate 0.0005378239999999999\u001b[0m\n", - "\u001b[32mINFO:lightwood-91181:Loss of 3.7833187580108643 with learning rate 0.0007529535999999998\u001b[0m\n", - "\u001b[32mINFO:lightwood-91181:Loss of 8.216030836105347 with learning rate 0.0010541350399999995\u001b[0m\n", - "\u001b[32mINFO:lightwood-91181:Found learning rate of: 0.0005378239999999999\u001b[0m\n", - "\u001b[37mDEBUG:lightwood-91181:Loss @ epoch 1: 0.7302289009094238\u001b[0m\n", - "\u001b[37mDEBUG:lightwood-91181:Loss @ epoch 2: 0.9203720092773438\u001b[0m\n", - "\u001b[37mDEBUG:lightwood-91181:Loss @ epoch 3: 0.8405624628067017\u001b[0m\n", - "\u001b[37mDEBUG:lightwood-91181:Loss @ epoch 4: 0.7608699202537537\u001b[0m\n", - "\u001b[37mDEBUG:lightwood-91181:Loss @ epoch 5: 0.6823285222053528\u001b[0m\n", - "\u001b[37mDEBUG:lightwood-91181:Loss @ epoch 6: 0.606808602809906\u001b[0m\n", - "\u001b[37mDEBUG:lightwood-91181:Loss @ epoch 7: 0.4470987617969513\u001b[0m\n", - "\u001b[37mDEBUG:lightwood-91181:Loss @ epoch 8: 0.3933545649051666\u001b[0m\n", - "\u001b[37mDEBUG:lightwood-91181:Loss @ epoch 9: 0.3497759997844696\u001b[0m\n", - "\u001b[37mDEBUG:lightwood-91181:Loss @ epoch 10: 0.3151411712169647\u001b[0m\n", - "\u001b[37mDEBUG:lightwood-91181:Loss @ epoch 11: 0.2879962623119354\u001b[0m\n", - 
"\u001b[37mDEBUG:lightwood-91181:Loss @ epoch 12: 0.2667108178138733\u001b[0m\n", - "\u001b[37mDEBUG:lightwood-91181:Loss @ epoch 13: 0.23354031145572662\u001b[0m\n", - "\u001b[37mDEBUG:lightwood-91181:Loss @ epoch 14: 0.21926474571228027\u001b[0m\n", - "\u001b[37mDEBUG:lightwood-91181:Loss @ epoch 15: 0.20496906340122223\u001b[0m\n", - "\u001b[37mDEBUG:lightwood-91181:Loss @ epoch 16: 0.19059491157531738\u001b[0m\n", - "\u001b[37mDEBUG:lightwood-91181:Loss @ epoch 17: 0.17612512409687042\u001b[0m\n", - "\u001b[37mDEBUG:lightwood-91181:Loss @ epoch 18: 0.161383256316185\u001b[0m\n", - "\u001b[37mDEBUG:lightwood-91181:Loss @ epoch 19: 0.12839828431606293\u001b[0m\n", - "\u001b[37mDEBUG:lightwood-91181:Loss @ epoch 20: 0.1162123903632164\u001b[0m\n", - "\u001b[37mDEBUG:lightwood-91181:Loss @ epoch 21: 0.10669219493865967\u001b[0m\n", - "\u001b[37mDEBUG:lightwood-91181:Loss @ epoch 22: 0.09954904764890671\u001b[0m\n", - "\u001b[37mDEBUG:lightwood-91181:Loss @ epoch 23: 0.09420691430568695\u001b[0m\n", - "\u001b[37mDEBUG:lightwood-91181:Loss @ epoch 24: 0.0900391936302185\u001b[0m\n", - "\u001b[37mDEBUG:lightwood-91181:Loss @ epoch 25: 0.08349908888339996\u001b[0m\n", - "\u001b[37mDEBUG:lightwood-91181:Loss @ epoch 26: 0.0822099968791008\u001b[0m\n", - "\u001b[37mDEBUG:lightwood-91181:Loss @ epoch 27: 0.08120812475681305\u001b[0m\n", - "\u001b[37mDEBUG:lightwood-91181:Loss @ epoch 28: 0.0804857686161995\u001b[0m\n", - "\u001b[37mDEBUG:lightwood-91181:Loss @ epoch 29: 0.07996372133493423\u001b[0m\n", - "\u001b[37mDEBUG:lightwood-91181:Loss @ epoch 30: 0.07936403155326843\u001b[0m\n", - "\u001b[37mDEBUG:lightwood-91181:Loss @ epoch 31: 0.07869081199169159\u001b[0m\n", - "\u001b[37mDEBUG:lightwood-91181:Loss @ epoch 32: 0.07849359512329102\u001b[0m\n", - "\u001b[37mDEBUG:lightwood-91181:Loss @ epoch 33: 0.07820077985525131\u001b[0m\n", - "\u001b[37mDEBUG:lightwood-91181:Loss @ epoch 34: 0.07790301740169525\u001b[0m\n", - "\u001b[37mDEBUG:lightwood-91181:Loss @ epoch 35: 0.07746117562055588\u001b[0m\n", - "\u001b[37mDEBUG:lightwood-91181:Loss @ epoch 36: 0.0766073539853096\u001b[0m\n", - "\u001b[37mDEBUG:lightwood-91181:Loss @ epoch 37: 0.07440945506095886\u001b[0m\n", - "\u001b[37mDEBUG:lightwood-91181:Loss @ epoch 38: 0.07304742932319641\u001b[0m\n", - "\u001b[37mDEBUG:lightwood-91181:Loss @ epoch 39: 0.07175709307193756\u001b[0m\n", - "\u001b[37mDEBUG:lightwood-91181:Loss @ epoch 40: 0.0706694945693016\u001b[0m\n", - "\u001b[37mDEBUG:lightwood-91181:Loss @ epoch 41: 0.06960804760456085\u001b[0m\n", - "\u001b[37mDEBUG:lightwood-91181:Loss @ epoch 42: 0.0683063194155693\u001b[0m\n", - "\u001b[37mDEBUG:lightwood-91181:Loss @ epoch 43: 0.06553898006677628\u001b[0m\n", - "\u001b[37mDEBUG:lightwood-91181:Loss @ epoch 44: 0.06447519361972809\u001b[0m\n", - "\u001b[37mDEBUG:lightwood-91181:Loss @ epoch 45: 0.06355087459087372\u001b[0m\n", - "\u001b[37mDEBUG:lightwood-91181:Loss @ epoch 46: 0.06285689026117325\u001b[0m\n", - "\u001b[37mDEBUG:lightwood-91181:Loss @ epoch 47: 0.0621829479932785\u001b[0m\n", - "\u001b[37mDEBUG:lightwood-91181:Loss @ epoch 48: 0.06127836927771568\u001b[0m\n", - "\u001b[37mDEBUG:lightwood-91181:Loss @ epoch 49: 0.05949181318283081\u001b[0m\n", - "\u001b[37mDEBUG:lightwood-91181:Loss @ epoch 50: 0.058798886835575104\u001b[0m\n", - "\u001b[37mDEBUG:lightwood-91181:Loss @ epoch 51: 0.058218929916620255\u001b[0m\n", - "\u001b[37mDEBUG:lightwood-91181:Loss @ epoch 52: 0.057854749262332916\u001b[0m\n", - "\u001b[37mDEBUG:lightwood-91181:Loss @ epoch 53: 
0.05746406316757202\u001b[0m\n", - "\u001b[37mDEBUG:lightwood-91181:Loss @ epoch 54: 0.056835610419511795\u001b[0m\n", - "\u001b[37mDEBUG:lightwood-91181:Loss @ epoch 55: 0.05569766089320183\u001b[0m\n", - "\u001b[37mDEBUG:lightwood-91181:Loss @ epoch 56: 0.05525219812989235\u001b[0m\n", - "\u001b[37mDEBUG:lightwood-91181:Loss @ epoch 57: 0.05490746721625328\u001b[0m\n", - "\u001b[37mDEBUG:lightwood-91181:Loss @ epoch 58: 0.054767243564128876\u001b[0m\n", - "\u001b[37mDEBUG:lightwood-91181:Loss @ epoch 59: 0.05455196276307106\u001b[0m\n", - "\u001b[37mDEBUG:lightwood-91181:Loss @ epoch 60: 0.0540977418422699\u001b[0m\n", - "\u001b[37mDEBUG:lightwood-91181:Loss @ epoch 61: 0.05336076393723488\u001b[0m\n", - "\u001b[37mDEBUG:lightwood-91181:Loss @ epoch 62: 0.053060129284858704\u001b[0m\n", - "\u001b[37mDEBUG:lightwood-91181:Loss @ epoch 63: 0.05285469442605972\u001b[0m\n", - "\u001b[37mDEBUG:lightwood-91181:Loss @ epoch 64: 0.0528554692864418\u001b[0m\n", - "\u001b[37mDEBUG:lightwood-91181:Loss @ epoch 65: 0.05273965373635292\u001b[0m\n", - "\u001b[37mDEBUG:lightwood-91181:Loss @ epoch 66: 0.05239948257803917\u001b[0m\n", - "\u001b[37mDEBUG:lightwood-91181:Loss @ epoch 67: 0.05194811150431633\u001b[0m\n", - "\u001b[37mDEBUG:lightwood-91181:Loss @ epoch 68: 0.05178629234433174\u001b[0m\n", - "\u001b[37mDEBUG:lightwood-91181:Loss @ epoch 69: 0.05171119421720505\u001b[0m\n", - "\u001b[37mDEBUG:lightwood-91181:Loss @ epoch 70: 0.05184203386306763\u001b[0m\n", - "\u001b[37mDEBUG:lightwood-91181:Loss @ epoch 71: 0.05181184783577919\u001b[0m\n" - ] - }, - { - "name": "stderr", - "output_type": "stream", - "text": [ - "\u001b[37mDEBUG:lightwood-91181:Loss @ epoch 72: 0.05157444253563881\u001b[0m\n", - "\u001b[37mDEBUG:lightwood-91181:Loss @ epoch 73: 0.05137106031179428\u001b[0m\n", - "\u001b[37mDEBUG:lightwood-91181:Loss @ epoch 74: 0.05131785199046135\u001b[0m\n", - "\u001b[37mDEBUG:lightwood-91181:Loss @ epoch 75: 0.05133713781833649\u001b[0m\n", - "\u001b[37mDEBUG:lightwood-91181:Loss @ epoch 76: 0.05156172439455986\u001b[0m\n", - "\u001b[32mINFO:lightwood-91181:Ensembling the mixer\u001b[0m\n", - "\u001b[32mINFO:lightwood-91181:Mixer: Neural got accuracy: 0.5960601553597429\u001b[0m\n", - "\u001b[32mINFO:lightwood-91181:Picked best mixer: Neural\u001b[0m\n", - "\u001b[32mINFO:lightwood-91181:Analyzing the ensemble of mixers\u001b[0m\n", - "\u001b[32mINFO:lightwood-91181:The block ICP is now running its analyze() method\u001b[0m\n", - "\u001b[32mINFO:lightwood-91181:The block AccStats is now running its analyze() method\u001b[0m\n", - "\u001b[32mINFO:lightwood-91181:The block GlobalFeatureImportance is now running its analyze() method\u001b[0m\n", - "\u001b[32mINFO:lightwood-91181:Adjustment on validation requested.\u001b[0m\n", - "\u001b[32mINFO:lightwood-91181:Updating the mixers\u001b[0m\n", - "torch.cuda.amp.GradScaler is enabled, but CUDA is not available. 
Disabling.\n", - "\u001b[37mDEBUG:lightwood-91181:Loss @ epoch 1: 0.06892643496394157\u001b[0m\n", - "\u001b[37mDEBUG:lightwood-91181:Loss @ epoch 2: 0.06978078782558442\u001b[0m\n", - "\u001b[37mDEBUG:lightwood-91181:Loss @ epoch 3: 0.06783530339598656\u001b[0m\n", - "\u001b[37mDEBUG:lightwood-91181:Loss @ epoch 4: 0.07201590612530709\u001b[0m\n", - "\u001b[37mDEBUG:lightwood-91181:Loss @ epoch 5: 0.0718848429620266\u001b[0m\n" - ] - } - ], - "source": [ - "# Define predictive task and predictor\n", - "target = 'concrete_strength'\n", - "pdef = ProblemDefinition.from_dict({'target': target, 'time_aim': 200})\n", - "jai = json_ai_from_problem(df, pdef)\n", - "\n", - "# We will keep the architecture simple: a single neural mixer, and a `BestOf` ensemble:\n", - "jai.outputs[target].mixers = [{\n", - " \"module\": \"Neural\",\n", - " \"args\": {\n", - " \"fit_on_dev\": False,\n", - " \"stop_after\": \"$problem_definition.seconds_per_mixer\",\n", - " \"search_hyperparameters\": False,\n", - " }\n", - "}]\n", - "\n", - "jai.outputs[target].ensemble = {\n", - " \"module\": \"BestOf\",\n", - " \"args\": {\n", - " \"args\": \"$pred_args\",\n", - " \"accuracy_functions\": \"$accuracy_functions\",\n", - " }\n", - "}\n", - "\n", - "# Build and train the predictor\n", - "predictor = predictor_from_json_ai(jai)\n", - "predictor.learn(train_df)" - ] - }, - { - "cell_type": "code", - "execution_count": 6, - "metadata": {}, - "outputs": [ - { - "name": "stderr", - "output_type": "stream", - "text": [ - "\u001b[32mINFO:lightwood-91181:Dropping features: []\u001b[0m\n", - "\u001b[32mINFO:lightwood-91181:Cleaning the data\u001b[0m\n", - "\u001b[32mINFO:lightwood-91181:Featurizing the data\u001b[0m\n", - "\u001b[32mINFO:lightwood-91181:The block ICP is now running its explain() method\u001b[0m\n", - "\u001b[32mINFO:lightwood-91181:The block AccStats is now running its explain() method\u001b[0m\n", - "\u001b[32mINFO:lightwood-91181:AccStats.explain() has not been implemented, no modifications will be done to the data insights.\u001b[0m\n", - "\u001b[32mINFO:lightwood-91181:The block GlobalFeatureImportance is now running its explain() method\u001b[0m\n", - "\u001b[32mINFO:lightwood-91181:GlobalFeatureImportance.explain() has not been implemented, no modifications will be done to the data insights.\u001b[0m\n" - ] - }, - { - "data": { - "text/html": [ - "
\n", - "\n", - "\n", - " \n", - " \n", - " \n", - " \n", - " \n", - " \n", - " \n", - " \n", - " \n", - " \n", - " \n", - " \n", - " \n", - " \n", - " \n", - " \n", - " \n", - " \n", - " \n", - " \n", - " \n", - " \n", - " \n", - " \n", - " \n", - " \n", - " \n", - " \n", - " \n", - " \n", - " \n", - " \n", - " \n", - " \n", - " \n", - " \n", - " \n", - " \n", - " \n", - " \n", - " \n", - " \n", - " \n", - " \n", - " \n", - " \n", - " \n", - " \n", - " \n", - " \n", - " \n", - " \n", - " \n", - " \n", - " \n", - " \n", - " \n", - " \n", - " \n", - " \n", - " \n", - " \n", - " \n", - " \n", - " \n", - " \n", - " \n", - " \n", - " \n", - " \n", - " \n", - " \n", - " \n", - " \n", - " \n", - " \n", - " \n", - " \n", - " \n", - " \n", - " \n", - " \n", - " \n", - " \n", - " \n", - " \n", - " \n", - " \n", - " \n", - " \n", - " \n", - " \n", - " \n", - " \n", - " \n", - " \n", - " \n", - " \n", - " \n", - " \n", - "
predictiontruthconfidencelowerupper
051.19360371.300.999130.54044371.846764
128.50339039.600.99917.85022949.156551
218.35613910.790.99910.00000039.009300
316.0620944.830.99910.00000036.715254
432.62362947.710.999111.97046953.276790
..................
20145.63381140.930.999124.98065066.286972
20241.61320952.820.999120.96004862.266369
20331.29704439.660.999110.64388351.950204
20429.40925813.290.99918.75609750.062418
20537.71213817.840.999117.05897758.365298
\n", - "

206 rows × 5 columns

\n", - "
" - ], - "text/plain": [ - " prediction truth confidence lower upper\n", - "0 51.193603 71.30 0.9991 30.540443 71.846764\n", - "1 28.503390 39.60 0.9991 7.850229 49.156551\n", - "2 18.356139 10.79 0.9991 0.000000 39.009300\n", - "3 16.062094 4.83 0.9991 0.000000 36.715254\n", - "4 32.623629 47.71 0.9991 11.970469 53.276790\n", - ".. ... ... ... ... ...\n", - "201 45.633811 40.93 0.9991 24.980650 66.286972\n", - "202 41.613209 52.82 0.9991 20.960048 62.266369\n", - "203 31.297044 39.66 0.9991 10.643883 51.950204\n", - "204 29.409258 13.29 0.9991 8.756097 50.062418\n", - "205 37.712138 17.84 0.9991 17.058977 58.365298\n", - "\n", - "[206 rows x 5 columns]" - ] - }, - "execution_count": 6, - "metadata": {}, - "output_type": "execute_result" - } - ], - "source": [ - "# Train and get predictions for the held out test set\n", - "predictions = predictor.predict(test_df)\n", - "predictions" - ] - }, - { - "cell_type": "markdown", - "metadata": {}, - "source": [ - "## Updating the predictor\n", - "\n", - "As previously mentioned, you can update any given mixer with a `BaseMixer.partial_fit()` call. If you have multiple mixers and want to update them all at once, you should use `PredictorInterface.adjust()`. \n", - "\n", - "For both of these methods, two encoded datasources are needed as input (for `adjust` you need to wrap them in a dictionary with 'old' and 'new' keys). \n", - "\n", - "Let's `adjust` our predictor:" - ] - }, - { - "cell_type": "code", - "execution_count": 8, - "metadata": {}, - "outputs": [ - { - "name": "stderr", - "output_type": "stream", - "text": [ - "\u001b[32mINFO:lightwood-91181:Updating the mixers\u001b[0m\n", - "torch.cuda.amp.GradScaler is enabled, but CUDA is not available. Disabling.\n", - "\u001b[37mDEBUG:lightwood-91181:Loss @ epoch 1: 0.06545061928530534\u001b[0m\n", - "\u001b[37mDEBUG:lightwood-91181:Loss @ epoch 2: 0.0679960281898578\u001b[0m\n", - "\u001b[37mDEBUG:lightwood-91181:Loss @ epoch 3: 0.07171888339022796\u001b[0m\n", - "\u001b[37mDEBUG:lightwood-91181:Loss @ epoch 4: 0.07307156516859929\u001b[0m\n", - "\u001b[37mDEBUG:lightwood-91181:Loss @ epoch 5: 0.06360626469055812\u001b[0m\n", - "\u001b[37mDEBUG:lightwood-91181:Loss @ epoch 6: 0.06457449619968732\u001b[0m\n", - "\u001b[37mDEBUG:lightwood-91181:Loss @ epoch 7: 0.057915804286797844\u001b[0m\n", - "\u001b[37mDEBUG:lightwood-91181:Loss @ epoch 8: 0.06492673171063264\u001b[0m\n" - ] - } - ], - "source": [ - "from lightwood.data import EncodedDs\n", - "\n", - "train_ds = EncodedDs(predictor.encoders, train_df, target)\n", - "update_ds = EncodedDs(predictor.encoders, update_df, target)\n", - "\n", - "predictor.adjust({'old': train_ds, 'new': update_ds})" - ] - }, - { - "cell_type": "code", - "execution_count": 9, - "metadata": {}, - "outputs": [ - { - "name": "stderr", - "output_type": "stream", - "text": [ - "\u001b[32mINFO:lightwood-91181:Dropping features: []\u001b[0m\n", - "\u001b[32mINFO:lightwood-91181:Cleaning the data\u001b[0m\n", - "\u001b[32mINFO:lightwood-91181:Featurizing the data\u001b[0m\n", - "\u001b[32mINFO:lightwood-91181:The block ICP is now running its explain() method\u001b[0m\n", - "\u001b[32mINFO:lightwood-91181:The block AccStats is now running its explain() method\u001b[0m\n", - "\u001b[32mINFO:lightwood-91181:AccStats.explain() has not been implemented, no modifications will be done to the data insights.\u001b[0m\n", - "\u001b[32mINFO:lightwood-91181:The block GlobalFeatureImportance is now running its explain() method\u001b[0m\n", - 
"\u001b[32mINFO:lightwood-91181:GlobalFeatureImportance.explain() has not been implemented, no modifications will be done to the data insights.\u001b[0m\n" - ] - }, - { - "data": { - "text/html": [ - "
\n", - "\n", - "\n", - " \n", - " \n", - " \n", - " \n", - " \n", - " \n", - " \n", - " \n", - " \n", - " \n", - " \n", - " \n", - " \n", - " \n", - " \n", - " \n", - " \n", - " \n", - " \n", - " \n", - " \n", - " \n", - " \n", - " \n", - " \n", - " \n", - " \n", - " \n", - " \n", - " \n", - " \n", - " \n", - " \n", - " \n", - " \n", - " \n", - " \n", - " \n", - " \n", - " \n", - " \n", - " \n", - " \n", - " \n", - " \n", - " \n", - " \n", - " \n", - " \n", - " \n", - " \n", - " \n", - " \n", - " \n", - " \n", - " \n", - " \n", - " \n", - " \n", - " \n", - " \n", - " \n", - " \n", - " \n", - " \n", - " \n", - " \n", - " \n", - " \n", - " \n", - " \n", - " \n", - " \n", - " \n", - " \n", - " \n", - " \n", - " \n", - " \n", - " \n", - " \n", - " \n", - " \n", - " \n", - " \n", - " \n", - " \n", - " \n", - " \n", - " \n", - " \n", - " \n", - " \n", - " \n", - " \n", - " \n", - " \n", - " \n", - " \n", - " \n", - "
predictiontruthconfidencelowerupper
053.39225371.300.999132.73909374.045414
127.88629239.600.99917.23313248.539453
216.30178810.790.99910.00000036.954948
313.8628274.830.99910.00000034.515988
431.42103547.710.999110.76787552.074196
..................
20142.63103740.930.999121.97787663.284197
20237.50244452.820.999116.84928358.155604
20329.49148739.660.99918.83832650.144647
20428.01357013.290.99917.36041048.666731
20535.33604317.840.999114.68288355.989204
\n", - "

206 rows × 5 columns

\n", - "
" - ], - "text/plain": [ - " prediction truth confidence lower upper\n", - "0 53.392253 71.30 0.9991 32.739093 74.045414\n", - "1 27.886292 39.60 0.9991 7.233132 48.539453\n", - "2 16.301788 10.79 0.9991 0.000000 36.954948\n", - "3 13.862827 4.83 0.9991 0.000000 34.515988\n", - "4 31.421035 47.71 0.9991 10.767875 52.074196\n", - ".. ... ... ... ... ...\n", - "201 42.631037 40.93 0.9991 21.977876 63.284197\n", - "202 37.502444 52.82 0.9991 16.849283 58.155604\n", - "203 29.491487 39.66 0.9991 8.838326 50.144647\n", - "204 28.013570 13.29 0.9991 7.360410 48.666731\n", - "205 35.336043 17.84 0.9991 14.682883 55.989204\n", - "\n", - "[206 rows x 5 columns]" - ] - }, - "execution_count": 9, - "metadata": {}, - "output_type": "execute_result" - } - ], - "source": [ - "new_predictions = predictor.predict(test_df)\n", - "new_predictions" - ] - }, - { - "cell_type": "markdown", - "metadata": {}, - "source": [ - "Nice! Our predictor was updated, and new predictions are looking good. Let's compare the old and new accuracies:" - ] - }, - { - "cell_type": "code", - "execution_count": 10, - "metadata": {}, - "outputs": [ - { - "name": "stdout", - "output_type": "stream", - "text": [ - "Old Accuracy: 0.583\n", - "New Accuracy: 0.624\n" - ] - } - ], - "source": [ - "from sklearn.metrics import r2_score\n", - "\n", - "old_acc = r2_score(predictions['truth'], predictions['prediction'])\n", - "new_acc = r2_score(new_predictions['truth'], new_predictions['prediction'])\n", - "\n", - "print(f'Old Accuracy: {round(old_acc, 3)}\\nNew Accuracy: {round(new_acc, 3)}')" - ] - }, - { - "cell_type": "markdown", - "metadata": {}, - "source": [ - "After updating, we see an increase in the R2 score of predictions for the held out test set.\n", - "\n", - "## Conclusion\n", - "\n", - "We have gone through a simple example of how Lightwood predictors can leverage newly acquired data to improve their predictions. The interface for doing so is fairly simple, requiring only some new data and a single call to update.\n", - "\n", - "You can further customize the logic for updating your mixers by modifying the `partial_fit()` methods in them." - ] - } - ], - "metadata": { - "kernelspec": { - "display_name": "mdb", - "language": "python", - "name": "mdb" - }, - "language_info": { - "codemirror_mode": { - "name": "ipython", - "version": 3 - }, - "file_extension": ".py", - "mimetype": "text/x-python", - "name": "python", - "nbconvert_exporter": "python", - "pygments_lexer": "ipython3", - "version": "3.8.6" - } - }, - "nbformat": 4, - "nbformat_minor": 4 -} diff --git a/docs/_static/basic.css b/docs/_static/basic.css deleted file mode 100644 index 912859b55..000000000 --- a/docs/_static/basic.css +++ /dev/null @@ -1,904 +0,0 @@ -/* - * basic.css - * ~~~~~~~~~ - * - * Sphinx stylesheet -- basic theme. - * - * :copyright: Copyright 2007-2021 by the Sphinx team, see AUTHORS. - * :license: BSD, see LICENSE for details. 
- * - */ - -/* -- main layout ----------------------------------------------------------- */ - -div.clearer { - clear: both; -} - -div.section::after { - display: block; - content: ''; - clear: left; -} - -/* -- relbar ---------------------------------------------------------------- */ - -div.related { - width: 100%; - font-size: 90%; -} - -div.related h3 { - display: none; -} - -div.related ul { - margin: 0; - padding: 0 0 0 10px; - list-style: none; -} - -div.related li { - display: inline; -} - -div.related li.right { - float: right; - margin-right: 5px; -} - -/* -- sidebar --------------------------------------------------------------- */ - -div.sphinxsidebarwrapper { - padding: 10px 5px 0 10px; -} - -div.sphinxsidebar { - float: left; - width: 230px; - margin-left: -100%; - font-size: 90%; - word-wrap: break-word; - overflow-wrap : break-word; -} - -div.sphinxsidebar ul { - list-style: none; -} - -div.sphinxsidebar ul ul, -div.sphinxsidebar ul.want-points { - margin-left: 20px; - list-style: square; -} - -div.sphinxsidebar ul ul { - margin-top: 0; - margin-bottom: 0; -} - -div.sphinxsidebar form { - margin-top: 10px; -} - -div.sphinxsidebar input { - border: 1px solid #98dbcc; - font-family: sans-serif; - font-size: 1em; -} - -div.sphinxsidebar #searchbox form.search { - overflow: hidden; -} - -div.sphinxsidebar #searchbox input[type="text"] { - float: left; - width: 80%; - padding: 0.25em; - box-sizing: border-box; -} - -div.sphinxsidebar #searchbox input[type="submit"] { - float: left; - width: 20%; - border-left: none; - padding: 0.25em; - box-sizing: border-box; -} - - -img { - border: 0; - max-width: 100%; -} - -/* -- search page ----------------------------------------------------------- */ - -ul.search { - margin: 10px 0 0 20px; - padding: 0; -} - -ul.search li { - padding: 5px 0 5px 20px; - background-image: url(file.png); - background-repeat: no-repeat; - background-position: 0 7px; -} - -ul.search li a { - font-weight: bold; -} - -ul.search li p.context { - color: #888; - margin: 2px 0 0 30px; - text-align: left; -} - -ul.keywordmatches li.goodmatch a { - font-weight: bold; -} - -/* -- index page ------------------------------------------------------------ */ - -table.contentstable { - width: 90%; - margin-left: auto; - margin-right: auto; -} - -table.contentstable p.biglink { - line-height: 150%; -} - -a.biglink { - font-size: 1.3em; -} - -span.linkdescr { - font-style: italic; - padding-top: 5px; - font-size: 90%; -} - -/* -- general index --------------------------------------------------------- */ - -table.indextable { - width: 100%; -} - -table.indextable td { - text-align: left; - vertical-align: top; -} - -table.indextable ul { - margin-top: 0; - margin-bottom: 0; - list-style-type: none; -} - -table.indextable > tbody > tr > td > ul { - padding-left: 0em; -} - -table.indextable tr.pcap { - height: 10px; -} - -table.indextable tr.cap { - margin-top: 10px; - background-color: #f2f2f2; -} - -img.toggler { - margin-right: 3px; - margin-top: 3px; - cursor: pointer; -} - -div.modindex-jumpbox { - border-top: 1px solid #ddd; - border-bottom: 1px solid #ddd; - margin: 1em 0 1em 0; - padding: 0.4em; -} - -div.genindex-jumpbox { - border-top: 1px solid #ddd; - border-bottom: 1px solid #ddd; - margin: 1em 0 1em 0; - padding: 0.4em; -} - -/* -- domain module index --------------------------------------------------- */ - -table.modindextable td { - padding: 2px; - border-collapse: collapse; -} - -/* -- general body styles --------------------------------------------------- */ - 
-div.body { - min-width: 450px; - max-width: 800px; -} - -div.body p, div.body dd, div.body li, div.body blockquote { - -moz-hyphens: auto; - -ms-hyphens: auto; - -webkit-hyphens: auto; - hyphens: auto; -} - -a.headerlink { - visibility: hidden; -} - -a.brackets:before, -span.brackets > a:before{ - content: "["; -} - -a.brackets:after, -span.brackets > a:after { - content: "]"; -} - -h1:hover > a.headerlink, -h2:hover > a.headerlink, -h3:hover > a.headerlink, -h4:hover > a.headerlink, -h5:hover > a.headerlink, -h6:hover > a.headerlink, -dt:hover > a.headerlink, -caption:hover > a.headerlink, -p.caption:hover > a.headerlink, -div.code-block-caption:hover > a.headerlink { - visibility: visible; -} - -div.body p.caption { - text-align: inherit; -} - -div.body td { - text-align: left; -} - -.first { - margin-top: 0 !important; -} - -p.rubric { - margin-top: 30px; - font-weight: bold; -} - -img.align-left, figure.align-left, .figure.align-left, object.align-left { - clear: left; - float: left; - margin-right: 1em; -} - -img.align-right, figure.align-right, .figure.align-right, object.align-right { - clear: right; - float: right; - margin-left: 1em; -} - -img.align-center, figure.align-center, .figure.align-center, object.align-center { - display: block; - margin-left: auto; - margin-right: auto; -} - -img.align-default, figure.align-default, .figure.align-default { - display: block; - margin-left: auto; - margin-right: auto; -} - -.align-left { - text-align: left; -} - -.align-center { - text-align: center; -} - -.align-default { - text-align: center; -} - -.align-right { - text-align: right; -} - -/* -- sidebars -------------------------------------------------------------- */ - -div.sidebar, -aside.sidebar { - margin: 0 0 0.5em 1em; - border: 1px solid #ddb; - padding: 7px; - background-color: #ffe; - width: 40%; - float: right; - clear: right; - overflow-x: auto; -} - -p.sidebar-title { - font-weight: bold; -} - -div.admonition, div.topic, blockquote { - clear: left; -} - -/* -- topics ---------------------------------------------------------------- */ - -div.topic { - border: 1px solid #ccc; - padding: 7px; - margin: 10px 0 10px 0; -} - -p.topic-title { - font-size: 1.1em; - font-weight: bold; - margin-top: 10px; -} - -/* -- admonitions ----------------------------------------------------------- */ - -div.admonition { - margin-top: 10px; - margin-bottom: 10px; - padding: 7px; -} - -div.admonition dt { - font-weight: bold; -} - -p.admonition-title { - margin: 0px 10px 5px 0px; - font-weight: bold; -} - -div.body p.centered { - text-align: center; - margin-top: 25px; -} - -/* -- content of sidebars/topics/admonitions -------------------------------- */ - -div.sidebar > :last-child, -aside.sidebar > :last-child, -div.topic > :last-child, -div.admonition > :last-child { - margin-bottom: 0; -} - -div.sidebar::after, -aside.sidebar::after, -div.topic::after, -div.admonition::after, -blockquote::after { - display: block; - content: ''; - clear: both; -} - -/* -- tables ---------------------------------------------------------------- */ - -table.docutils { - margin-top: 10px; - margin-bottom: 10px; - border: 0; - border-collapse: collapse; -} - -table.align-center { - margin-left: auto; - margin-right: auto; -} - -table.align-default { - margin-left: auto; - margin-right: auto; -} - -table caption span.caption-number { - font-style: italic; -} - -table caption span.caption-text { -} - -table.docutils td, table.docutils th { - padding: 1px 8px 1px 5px; - border-top: 0; - border-left: 0; - 
border-right: 0; - border-bottom: 1px solid #aaa; -} - -table.footnote td, table.footnote th { - border: 0 !important; -} - -th { - text-align: left; - padding-right: 5px; -} - -table.citation { - border-left: solid 1px gray; - margin-left: 1px; -} - -table.citation td { - border-bottom: none; -} - -th > :first-child, -td > :first-child { - margin-top: 0px; -} - -th > :last-child, -td > :last-child { - margin-bottom: 0px; -} - -/* -- figures --------------------------------------------------------------- */ - -div.figure, figure { - margin: 0.5em; - padding: 0.5em; -} - -div.figure p.caption, figcaption { - padding: 0.3em; -} - -div.figure p.caption span.caption-number, -figcaption span.caption-number { - font-style: italic; -} - -div.figure p.caption span.caption-text, -figcaption span.caption-text { -} - -/* -- field list styles ----------------------------------------------------- */ - -table.field-list td, table.field-list th { - border: 0 !important; -} - -.field-list ul { - margin: 0; - padding-left: 1em; -} - -.field-list p { - margin: 0; -} - -.field-name { - -moz-hyphens: manual; - -ms-hyphens: manual; - -webkit-hyphens: manual; - hyphens: manual; -} - -/* -- hlist styles ---------------------------------------------------------- */ - -table.hlist { - margin: 1em 0; -} - -table.hlist td { - vertical-align: top; -} - -/* -- object description styles --------------------------------------------- */ - -.sig { - font-family: 'Consolas', 'Menlo', 'DejaVu Sans Mono', 'Bitstream Vera Sans Mono', monospace; -} - -.sig-name, code.descname { - background-color: transparent; - font-weight: bold; -} - -.sig-name { - font-size: 1.1em; -} - -code.descname { - font-size: 1.2em; -} - -.sig-prename, code.descclassname { - background-color: transparent; -} - -.optional { - font-size: 1.3em; -} - -.sig-paren { - font-size: larger; -} - -.sig-param.n { - font-style: italic; -} - -/* C++ specific styling */ - -.sig-inline.c-texpr, -.sig-inline.cpp-texpr { - font-family: unset; -} - -.sig.c .k, .sig.c .kt, -.sig.cpp .k, .sig.cpp .kt { - color: #0033B3; -} - -.sig.c .m, -.sig.cpp .m { - color: #1750EB; -} - -.sig.c .s, .sig.c .sc, -.sig.cpp .s, .sig.cpp .sc { - color: #067D17; -} - - -/* -- other body styles ----------------------------------------------------- */ - -ol.arabic { - list-style: decimal; -} - -ol.loweralpha { - list-style: lower-alpha; -} - -ol.upperalpha { - list-style: upper-alpha; -} - -ol.lowerroman { - list-style: lower-roman; -} - -ol.upperroman { - list-style: upper-roman; -} - -:not(li) > ol > li:first-child > :first-child, -:not(li) > ul > li:first-child > :first-child { - margin-top: 0px; -} - -:not(li) > ol > li:last-child > :last-child, -:not(li) > ul > li:last-child > :last-child { - margin-bottom: 0px; -} - -ol.simple ol p, -ol.simple ul p, -ul.simple ol p, -ul.simple ul p { - margin-top: 0; -} - -ol.simple > li:not(:first-child) > p, -ul.simple > li:not(:first-child) > p { - margin-top: 0; -} - -ol.simple p, -ul.simple p { - margin-bottom: 0; -} - -dl.footnote > dt, -dl.citation > dt { - float: left; - margin-right: 0.5em; -} - -dl.footnote > dd, -dl.citation > dd { - margin-bottom: 0em; -} - -dl.footnote > dd:after, -dl.citation > dd:after { - content: ""; - clear: both; -} - -dl.field-list { - display: grid; - grid-template-columns: fit-content(30%) auto; -} - -dl.field-list > dt { - font-weight: bold; - word-break: break-word; - padding-left: 0.5em; - padding-right: 5px; -} - -dl.field-list > dt:after { - content: ":"; -} - -dl.field-list > dd { - padding-left: 0.5em; - 
margin-top: 0em; - margin-left: 0em; - margin-bottom: 0em; -} - -dl { - margin-bottom: 15px; -} - -dd > :first-child { - margin-top: 0px; -} - -dd ul, dd table { - margin-bottom: 10px; -} - -dd { - margin-top: 3px; - margin-bottom: 10px; - margin-left: 30px; -} - -dl > dd:last-child, -dl > dd:last-child > :last-child { - margin-bottom: 0; -} - -dt:target, span.highlighted { - background-color: #fbe54e; -} - -rect.highlighted { - fill: #fbe54e; -} - -dl.glossary dt { - font-weight: bold; - font-size: 1.1em; -} - -.versionmodified { - font-style: italic; -} - -.system-message { - background-color: #fda; - padding: 5px; - border: 3px solid red; -} - -.footnote:target { - background-color: #ffa; -} - -.line-block { - display: block; - margin-top: 1em; - margin-bottom: 1em; -} - -.line-block .line-block { - margin-top: 0; - margin-bottom: 0; - margin-left: 1.5em; -} - -.guilabel, .menuselection { - font-family: sans-serif; -} - -.accelerator { - text-decoration: underline; -} - -.classifier { - font-style: oblique; -} - -.classifier:before { - font-style: normal; - margin: 0.5em; - content: ":"; -} - -abbr, acronym { - border-bottom: dotted 1px; - cursor: help; -} - -/* -- code displays --------------------------------------------------------- */ - -pre { - overflow: auto; - overflow-y: hidden; /* fixes display issues on Chrome browsers */ -} - -pre, div[class*="highlight-"] { - clear: both; -} - -span.pre { - -moz-hyphens: none; - -ms-hyphens: none; - -webkit-hyphens: none; - hyphens: none; -} - -div[class*="highlight-"] { - margin: 1em 0; -} - -td.linenos pre { - border: 0; - background-color: transparent; - color: #aaa; -} - -table.highlighttable { - display: block; -} - -table.highlighttable tbody { - display: block; -} - -table.highlighttable tr { - display: flex; -} - -table.highlighttable td { - margin: 0; - padding: 0; -} - -table.highlighttable td.linenos { - padding-right: 0.5em; -} - -table.highlighttable td.code { - flex: 1; - overflow: hidden; -} - -.highlight .hll { - display: block; -} - -div.highlight pre, -table.highlighttable pre { - margin: 0; -} - -div.code-block-caption + div { - margin-top: 0; -} - -div.code-block-caption { - margin-top: 1em; - padding: 2px 5px; - font-size: small; -} - -div.code-block-caption code { - background-color: transparent; -} - -table.highlighttable td.linenos, -span.linenos, -div.highlight span.gp { /* gp: Generic.Prompt */ - user-select: none; - -webkit-user-select: text; /* Safari fallback only */ - -webkit-user-select: none; /* Chrome/Safari */ - -moz-user-select: none; /* Firefox */ - -ms-user-select: none; /* IE10+ */ -} - -div.code-block-caption span.caption-number { - padding: 0.1em 0.3em; - font-style: italic; -} - -div.code-block-caption span.caption-text { -} - -div.literal-block-wrapper { - margin: 1em 0; -} - -code.xref, a code { - background-color: transparent; - font-weight: bold; -} - -h1 code, h2 code, h3 code, h4 code, h5 code, h6 code { - background-color: transparent; -} - -.viewcode-link { - float: right; -} - -.viewcode-back { - float: right; - font-family: sans-serif; -} - -div.viewcode-block:target { - margin: -1px -10px; - padding: 0 10px; -} - -/* -- math display ---------------------------------------------------------- */ - -img.math { - vertical-align: middle; -} - -div.body div.math p { - text-align: center; -} - -span.eqno { - float: right; -} - -span.eqno a.headerlink { - position: absolute; - z-index: 1; -} - -div.math:hover a.headerlink { - visibility: visible; -} - -/* -- printout stylesheet 
--------------------------------------------------- */ - -@media print { - div.document, - div.documentwrapper, - div.bodywrapper { - margin: 0 !important; - width: 100%; - } - - div.sphinxsidebar, - div.related, - div.footer, - #top-link { - display: none; - } -} \ No newline at end of file diff --git a/docs/_static/css/badge_only.css b/docs/_static/css/badge_only.css deleted file mode 100644 index e380325bc..000000000 --- a/docs/_static/css/badge_only.css +++ /dev/null @@ -1 +0,0 @@ -.fa:before{-webkit-font-smoothing:antialiased}.clearfix{*zoom:1}.clearfix:after,.clearfix:before{display:table;content:""}.clearfix:after{clear:both}@font-face{font-family:FontAwesome;font-style:normal;font-weight:400;src:url(fonts/fontawesome-webfont.eot?674f50d287a8c48dc19ba404d20fe713?#iefix) format("embedded-opentype"),url(fonts/fontawesome-webfont.woff2?af7ae505a9eed503f8b8e6982036873e) format("woff2"),url(fonts/fontawesome-webfont.woff?fee66e712a8a08eef5805a46892932ad) format("woff"),url(fonts/fontawesome-webfont.ttf?b06871f281fee6b241d60582ae9369b9) format("truetype"),url(fonts/fontawesome-webfont.svg?912ec66d7572ff821749319396470bde#FontAwesome) format("svg")}.fa:before{font-family:FontAwesome;font-style:normal;font-weight:400;line-height:1}.fa:before,a .fa{text-decoration:inherit}.fa:before,a .fa,li .fa{display:inline-block}li .fa-large:before{width:1.875em}ul.fas{list-style-type:none;margin-left:2em;text-indent:-.8em}ul.fas li .fa{width:.8em}ul.fas li .fa-large:before{vertical-align:baseline}.fa-book:before,.icon-book:before{content:"\f02d"}.fa-caret-down:before,.icon-caret-down:before{content:"\f0d7"}.fa-caret-up:before,.icon-caret-up:before{content:"\f0d8"}.fa-caret-left:before,.icon-caret-left:before{content:"\f0d9"}.fa-caret-right:before,.icon-caret-right:before{content:"\f0da"}.rst-versions{position:fixed;bottom:0;left:0;width:300px;color:#fcfcfc;background:#1f1d1d;font-family:Lato,proxima-nova,Helvetica Neue,Arial,sans-serif;z-index:400}.rst-versions a{color:#2980b9;text-decoration:none}.rst-versions .rst-badge-small{display:none}.rst-versions .rst-current-version{padding:12px;background-color:#272525;display:block;text-align:right;font-size:90%;cursor:pointer;color:#27ae60}.rst-versions .rst-current-version:after{clear:both;content:"";display:block}.rst-versions .rst-current-version .fa{color:#fcfcfc}.rst-versions .rst-current-version .fa-book,.rst-versions .rst-current-version .icon-book{float:left}.rst-versions .rst-current-version.rst-out-of-date{background-color:#e74c3c;color:#fff}.rst-versions .rst-current-version.rst-active-old-version{background-color:#f1c40f;color:#000}.rst-versions.shift-up{height:auto;max-height:100%;overflow-y:scroll}.rst-versions.shift-up .rst-other-versions{display:block}.rst-versions .rst-other-versions{font-size:90%;padding:12px;color:grey;display:none}.rst-versions .rst-other-versions hr{display:block;height:1px;border:0;margin:20px 0;padding:0;border-top:1px solid #413d3d}.rst-versions .rst-other-versions dd{display:inline-block;margin:0}.rst-versions .rst-other-versions dd a{display:inline-block;padding:6px;color:#fcfcfc}.rst-versions.rst-badge{width:auto;bottom:20px;right:20px;left:auto;border:none;max-width:300px;max-height:90%}.rst-versions.rst-badge .fa-book,.rst-versions.rst-badge .icon-book{float:none;line-height:30px}.rst-versions.rst-badge.shift-up .rst-current-version{text-align:right}.rst-versions.rst-badge.shift-up .rst-current-version .fa-book,.rst-versions.rst-badge.shift-up .rst-current-version 
.icon-book{float:left}.rst-versions.rst-badge>.rst-current-version{width:auto;height:30px;line-height:30px;padding:0 6px;display:block;text-align:center}@media screen and (max-width:768px){.rst-versions{width:85%;display:none}.rst-versions.shift{display:block}} \ No newline at end of file diff --git a/docs/_static/css/fonts/Roboto-Slab-Bold.woff b/docs/_static/css/fonts/Roboto-Slab-Bold.woff deleted file mode 100644 index 6cb600001..000000000 Binary files a/docs/_static/css/fonts/Roboto-Slab-Bold.woff and /dev/null differ diff --git a/docs/_static/css/fonts/Roboto-Slab-Bold.woff2 b/docs/_static/css/fonts/Roboto-Slab-Bold.woff2 deleted file mode 100644 index 7059e2314..000000000 Binary files a/docs/_static/css/fonts/Roboto-Slab-Bold.woff2 and /dev/null differ diff --git a/docs/_static/css/fonts/Roboto-Slab-Regular.woff b/docs/_static/css/fonts/Roboto-Slab-Regular.woff deleted file mode 100644 index f815f63f9..000000000 Binary files a/docs/_static/css/fonts/Roboto-Slab-Regular.woff and /dev/null differ diff --git a/docs/_static/css/fonts/Roboto-Slab-Regular.woff2 b/docs/_static/css/fonts/Roboto-Slab-Regular.woff2 deleted file mode 100644 index f2c76e5bd..000000000 Binary files a/docs/_static/css/fonts/Roboto-Slab-Regular.woff2 and /dev/null differ diff --git a/docs/_static/css/fonts/fontawesome-webfont.eot b/docs/_static/css/fonts/fontawesome-webfont.eot deleted file mode 100644 index e9f60ca95..000000000 Binary files a/docs/_static/css/fonts/fontawesome-webfont.eot and /dev/null differ diff --git a/docs/_static/css/fonts/fontawesome-webfont.svg b/docs/_static/css/fonts/fontawesome-webfont.svg deleted file mode 100644 index 855c845e5..000000000 --- a/docs/_static/css/fonts/fontawesome-webfont.svg +++ /dev/null @@ -1,2671 +0,0 @@ - - - - -Created by FontForge 20120731 at Mon Oct 24 17:37:40 2016 - By ,,, -Copyright Dave Gandy 2016. All rights reserved. 
diff --git a/docs/_static/css/fonts/fontawesome-webfont.ttf b/docs/_static/css/fonts/fontawesome-webfont.ttf deleted file mode 100644 index 35acda2fa..000000000 Binary files a/docs/_static/css/fonts/fontawesome-webfont.ttf and /dev/null differ diff --git a/docs/_static/css/fonts/fontawesome-webfont.woff b/docs/_static/css/fonts/fontawesome-webfont.woff deleted file mode 100644 index 400014a4b..000000000 Binary files a/docs/_static/css/fonts/fontawesome-webfont.woff and /dev/null differ diff --git a/docs/_static/css/fonts/fontawesome-webfont.woff2 b/docs/_static/css/fonts/fontawesome-webfont.woff2 deleted file mode 100644 index 4d13fc604..000000000 Binary files a/docs/_static/css/fonts/fontawesome-webfont.woff2 and /dev/null differ diff --git a/docs/_static/css/fonts/lato-bold-italic.woff b/docs/_static/css/fonts/lato-bold-italic.woff deleted file mode 100644 index 88ad05b9f..000000000 Binary files a/docs/_static/css/fonts/lato-bold-italic.woff and /dev/null differ diff --git a/docs/_static/css/fonts/lato-bold-italic.woff2 b/docs/_static/css/fonts/lato-bold-italic.woff2 deleted file mode 100644 index c4e3d804b..000000000 Binary files a/docs/_static/css/fonts/lato-bold-italic.woff2 and /dev/null differ diff --git a/docs/_static/css/fonts/lato-bold.woff b/docs/_static/css/fonts/lato-bold.woff deleted file mode 100644 index c6dff51f0..000000000 Binary files a/docs/_static/css/fonts/lato-bold.woff and /dev/null differ diff --git a/docs/_static/css/fonts/lato-bold.woff2 b/docs/_static/css/fonts/lato-bold.woff2 deleted file mode 100644 index bb195043c..000000000 Binary files a/docs/_static/css/fonts/lato-bold.woff2 and /dev/null differ diff --git a/docs/_static/css/fonts/lato-normal-italic.woff b/docs/_static/css/fonts/lato-normal-italic.woff deleted file mode 100644 index 76114bc03..000000000 Binary files a/docs/_static/css/fonts/lato-normal-italic.woff and /dev/null differ diff --git a/docs/_static/css/fonts/lato-normal-italic.woff2 b/docs/_static/css/fonts/lato-normal-italic.woff2 deleted file mode 100644 index 3404f37e2..000000000 Binary files a/docs/_static/css/fonts/lato-normal-italic.woff2
and /dev/null differ diff --git a/docs/_static/css/fonts/lato-normal.woff b/docs/_static/css/fonts/lato-normal.woff deleted file mode 100644 index ae1307ff5..000000000 Binary files a/docs/_static/css/fonts/lato-normal.woff and /dev/null differ diff --git a/docs/_static/css/fonts/lato-normal.woff2 b/docs/_static/css/fonts/lato-normal.woff2 deleted file mode 100644 index 3bf984332..000000000 Binary files a/docs/_static/css/fonts/lato-normal.woff2 and /dev/null differ diff --git a/docs/_static/css/theme.css b/docs/_static/css/theme.css deleted file mode 100644 index 8cd4f101a..000000000 --- a/docs/_static/css/theme.css +++ /dev/null @@ -1,4 +0,0 @@ -html{box-sizing:border-box}*,:after,:before{box-sizing:inherit}article,aside,details,figcaption,figure,footer,header,hgroup,nav,section{display:block}audio,canvas,video{display:inline-block;*display:inline;*zoom:1}[hidden],audio:not([controls]){display:none}*{-webkit-box-sizing:border-box;-moz-box-sizing:border-box;box-sizing:border-box}html{font-size:100%;-webkit-text-size-adjust:100%;-ms-text-size-adjust:100%}body{margin:0}a:active,a:hover{outline:0}abbr[title]{border-bottom:1px dotted}b,strong{font-weight:700}blockquote{margin:0}dfn{font-style:italic}ins{background:#ff9;text-decoration:none}ins,mark{color:#000}mark{background:#ff0;font-style:italic;font-weight:700}.rst-content code,.rst-content tt,code,kbd,pre,samp{font-family:monospace,serif;_font-family:courier new,monospace;font-size:1em}pre{white-space:pre}q{quotes:none}q:after,q:before{content:"";content:none}small{font-size:85%}sub,sup{font-size:75%;line-height:0;position:relative;vertical-align:baseline}sup{top:-.5em}sub{bottom:-.25em}dl,ol,ul{margin:0;padding:0;list-style:none;list-style-image:none}li{list-style:none}dd{margin:0}img{border:0;-ms-interpolation-mode:bicubic;vertical-align:middle;max-width:100%}svg:not(:root){overflow:hidden}figure,form{margin:0}label{cursor:pointer}button,input,select,textarea{font-size:100%;margin:0;vertical-align:baseline;*vertical-align:middle}button,input{line-height:normal}button,input[type=button],input[type=reset],input[type=submit]{cursor:pointer;-webkit-appearance:button;*overflow:visible}button[disabled],input[disabled]{cursor:default}input[type=search]{-webkit-appearance:textfield;-moz-box-sizing:content-box;-webkit-box-sizing:content-box;box-sizing:content-box}textarea{resize:vertical}table{border-collapse:collapse;border-spacing:0}td{vertical-align:top}.chromeframe{margin:.2em 0;background:#ccc;color:#000;padding:.2em 0}.ir{display:block;border:0;text-indent:-999em;overflow:hidden;background-color:transparent;background-repeat:no-repeat;text-align:left;direction:ltr;*line-height:0}.ir br{display:none}.hidden{display:none!important;visibility:hidden}.visuallyhidden{border:0;clip:rect(0 0 0 0);height:1px;margin:-1px;overflow:hidden;padding:0;position:absolute;width:1px}.visuallyhidden.focusable:active,.visuallyhidden.focusable:focus{clip:auto;height:auto;margin:0;overflow:visible;position:static;width:auto}.invisible{visibility:hidden}.relative{position:relative}big,small{font-size:100%}@media print{body,html,section{background:none!important}*{box-shadow:none!important;text-shadow:none!important;filter:none!important;-ms-filter:none!important}a,a:visited{text-decoration:underline}.ir a:after,a[href^="#"]:after,a[href^="javascript:"]:after{content:""}blockquote,pre{page-break-inside:avoid}thead{display:table-header-group}img,tr{page-break-inside:avoid}img{max-width:100%!important}@page{margin:.5cm}.rst-content 
.toctree-wrapper>p.caption,h2,h3,p{orphans:3;widows:3}.rst-content .toctree-wrapper>p.caption,h2,h3{page-break-after:avoid}}.btn,.fa:before,.icon:before,.rst-content .admonition,.rst-content .admonition-title:before,.rst-content .admonition-todo,.rst-content .attention,.rst-content .caution,.rst-content .code-block-caption .headerlink:before,.rst-content .danger,.rst-content .error,.rst-content .hint,.rst-content .important,.rst-content .note,.rst-content .seealso,.rst-content .tip,.rst-content .warning,.rst-content code.download span:first-child:before,.rst-content dl dt .headerlink:before,.rst-content h1 .headerlink:before,.rst-content h2 .headerlink:before,.rst-content h3 .headerlink:before,.rst-content h4 .headerlink:before,.rst-content h5 .headerlink:before,.rst-content h6 .headerlink:before,.rst-content p.caption .headerlink:before,.rst-content table>caption .headerlink:before,.rst-content tt.download span:first-child:before,.wy-alert,.wy-dropdown .caret:before,.wy-inline-validate.wy-inline-validate-danger .wy-input-context:before,.wy-inline-validate.wy-inline-validate-info .wy-input-context:before,.wy-inline-validate.wy-inline-validate-success .wy-input-context:before,.wy-inline-validate.wy-inline-validate-warning .wy-input-context:before,.wy-menu-vertical li.current>a,.wy-menu-vertical li.current>a span.toctree-expand:before,.wy-menu-vertical li.on a,.wy-menu-vertical li.on a span.toctree-expand:before,.wy-menu-vertical li span.toctree-expand:before,.wy-nav-top a,.wy-side-nav-search .wy-dropdown>a,.wy-side-nav-search>a,input[type=color],input[type=date],input[type=datetime-local],input[type=datetime],input[type=email],input[type=month],input[type=number],input[type=password],input[type=search],input[type=tel],input[type=text],input[type=time],input[type=url],input[type=week],select,textarea{-webkit-font-smoothing:antialiased}.clearfix{*zoom:1}.clearfix:after,.clearfix:before{display:table;content:""}.clearfix:after{clear:both}/*! 
- * Font Awesome 4.7.0 by @davegandy - http://fontawesome.io - @fontawesome - * License - http://fontawesome.io/license (Font: SIL OFL 1.1, CSS: MIT License) - */@font-face{font-family:FontAwesome;src:url(fonts/fontawesome-webfont.eot?674f50d287a8c48dc19ba404d20fe713);src:url(fonts/fontawesome-webfont.eot?674f50d287a8c48dc19ba404d20fe713?#iefix&v=4.7.0) format("embedded-opentype"),url(fonts/fontawesome-webfont.woff2?af7ae505a9eed503f8b8e6982036873e) format("woff2"),url(fonts/fontawesome-webfont.woff?fee66e712a8a08eef5805a46892932ad) format("woff"),url(fonts/fontawesome-webfont.ttf?b06871f281fee6b241d60582ae9369b9) format("truetype"),url(fonts/fontawesome-webfont.svg?912ec66d7572ff821749319396470bde#fontawesomeregular) format("svg");font-weight:400;font-style:normal}.fa,.icon,.rst-content .admonition-title,.rst-content .code-block-caption .headerlink,.rst-content code.download span:first-child,.rst-content dl dt .headerlink,.rst-content h1 .headerlink,.rst-content h2 .headerlink,.rst-content h3 .headerlink,.rst-content h4 .headerlink,.rst-content h5 .headerlink,.rst-content h6 .headerlink,.rst-content p.caption .headerlink,.rst-content table>caption .headerlink,.rst-content tt.download span:first-child,.wy-menu-vertical li.current>a span.toctree-expand,.wy-menu-vertical li.on a span.toctree-expand,.wy-menu-vertical li span.toctree-expand{display:inline-block;font:normal normal normal 14px/1 FontAwesome;font-size:inherit;text-rendering:auto;-webkit-font-smoothing:antialiased;-moz-osx-font-smoothing:grayscale}.fa-lg{font-size:1.33333em;line-height:.75em;vertical-align:-15%}.fa-2x{font-size:2em}.fa-3x{font-size:3em}.fa-4x{font-size:4em}.fa-5x{font-size:5em}.fa-fw{width:1.28571em;text-align:center}.fa-ul{padding-left:0;margin-left:2.14286em;list-style-type:none}.fa-ul>li{position:relative}.fa-li{position:absolute;left:-2.14286em;width:2.14286em;top:.14286em;text-align:center}.fa-li.fa-lg{left:-1.85714em}.fa-border{padding:.2em .25em .15em;border:.08em solid #eee;border-radius:.1em}.fa-pull-left{float:left}.fa-pull-right{float:right}.fa-pull-left.icon,.fa.fa-pull-left,.rst-content .code-block-caption .fa-pull-left.headerlink,.rst-content .fa-pull-left.admonition-title,.rst-content code.download span.fa-pull-left:first-child,.rst-content dl dt .fa-pull-left.headerlink,.rst-content h1 .fa-pull-left.headerlink,.rst-content h2 .fa-pull-left.headerlink,.rst-content h3 .fa-pull-left.headerlink,.rst-content h4 .fa-pull-left.headerlink,.rst-content h5 .fa-pull-left.headerlink,.rst-content h6 .fa-pull-left.headerlink,.rst-content p.caption .fa-pull-left.headerlink,.rst-content table>caption .fa-pull-left.headerlink,.rst-content tt.download span.fa-pull-left:first-child,.wy-menu-vertical li.current>a span.fa-pull-left.toctree-expand,.wy-menu-vertical li.on a span.fa-pull-left.toctree-expand,.wy-menu-vertical li span.fa-pull-left.toctree-expand{margin-right:.3em}.fa-pull-right.icon,.fa.fa-pull-right,.rst-content .code-block-caption .fa-pull-right.headerlink,.rst-content .fa-pull-right.admonition-title,.rst-content code.download span.fa-pull-right:first-child,.rst-content dl dt .fa-pull-right.headerlink,.rst-content h1 .fa-pull-right.headerlink,.rst-content h2 .fa-pull-right.headerlink,.rst-content h3 .fa-pull-right.headerlink,.rst-content h4 .fa-pull-right.headerlink,.rst-content h5 .fa-pull-right.headerlink,.rst-content h6 .fa-pull-right.headerlink,.rst-content p.caption .fa-pull-right.headerlink,.rst-content table>caption .fa-pull-right.headerlink,.rst-content tt.download 
span.fa-pull-right:first-child,.wy-menu-vertical li.current>a span.fa-pull-right.toctree-expand,.wy-menu-vertical li.on a span.fa-pull-right.toctree-expand,.wy-menu-vertical li span.fa-pull-right.toctree-expand{margin-left:.3em}.pull-right{float:right}.pull-left{float:left}.fa.pull-left,.pull-left.icon,.rst-content .code-block-caption .pull-left.headerlink,.rst-content .pull-left.admonition-title,.rst-content code.download span.pull-left:first-child,.rst-content dl dt .pull-left.headerlink,.rst-content h1 .pull-left.headerlink,.rst-content h2 .pull-left.headerlink,.rst-content h3 .pull-left.headerlink,.rst-content h4 .pull-left.headerlink,.rst-content h5 .pull-left.headerlink,.rst-content h6 .pull-left.headerlink,.rst-content p.caption .pull-left.headerlink,.rst-content table>caption .pull-left.headerlink,.rst-content tt.download span.pull-left:first-child,.wy-menu-vertical li.current>a span.pull-left.toctree-expand,.wy-menu-vertical li.on a span.pull-left.toctree-expand,.wy-menu-vertical li span.pull-left.toctree-expand{margin-right:.3em}.fa.pull-right,.pull-right.icon,.rst-content .code-block-caption .pull-right.headerlink,.rst-content .pull-right.admonition-title,.rst-content code.download span.pull-right:first-child,.rst-content dl dt .pull-right.headerlink,.rst-content h1 .pull-right.headerlink,.rst-content h2 .pull-right.headerlink,.rst-content h3 .pull-right.headerlink,.rst-content h4 .pull-right.headerlink,.rst-content h5 .pull-right.headerlink,.rst-content h6 .pull-right.headerlink,.rst-content p.caption .pull-right.headerlink,.rst-content table>caption .pull-right.headerlink,.rst-content tt.download span.pull-right:first-child,.wy-menu-vertical li.current>a span.pull-right.toctree-expand,.wy-menu-vertical li.on a span.pull-right.toctree-expand,.wy-menu-vertical li span.pull-right.toctree-expand{margin-left:.3em}.fa-spin{-webkit-animation:fa-spin 2s linear infinite;animation:fa-spin 2s linear infinite}.fa-pulse{-webkit-animation:fa-spin 1s steps(8) infinite;animation:fa-spin 1s steps(8) infinite}@-webkit-keyframes fa-spin{0%{-webkit-transform:rotate(0deg);transform:rotate(0deg)}to{-webkit-transform:rotate(359deg);transform:rotate(359deg)}}@keyframes fa-spin{0%{-webkit-transform:rotate(0deg);transform:rotate(0deg)}to{-webkit-transform:rotate(359deg);transform:rotate(359deg)}}.fa-rotate-90{-ms-filter:"progid:DXImageTransform.Microsoft.BasicImage(rotation=1)";-webkit-transform:rotate(90deg);-ms-transform:rotate(90deg);transform:rotate(90deg)}.fa-rotate-180{-ms-filter:"progid:DXImageTransform.Microsoft.BasicImage(rotation=2)";-webkit-transform:rotate(180deg);-ms-transform:rotate(180deg);transform:rotate(180deg)}.fa-rotate-270{-ms-filter:"progid:DXImageTransform.Microsoft.BasicImage(rotation=3)";-webkit-transform:rotate(270deg);-ms-transform:rotate(270deg);transform:rotate(270deg)}.fa-flip-horizontal{-ms-filter:"progid:DXImageTransform.Microsoft.BasicImage(rotation=0, mirror=1)";-webkit-transform:scaleX(-1);-ms-transform:scaleX(-1);transform:scaleX(-1)}.fa-flip-vertical{-ms-filter:"progid:DXImageTransform.Microsoft.BasicImage(rotation=2, mirror=1)";-webkit-transform:scaleY(-1);-ms-transform:scaleY(-1);transform:scaleY(-1)}:root .fa-flip-horizontal,:root .fa-flip-vertical,:root .fa-rotate-90,:root .fa-rotate-180,:root 
.fa-rotate-270{filter:none}.fa-stack{position:relative;display:inline-block;width:2em;height:2em;line-height:2em;vertical-align:middle}.fa-stack-1x,.fa-stack-2x{position:absolute;left:0;width:100%;text-align:center}.fa-stack-1x{line-height:inherit}.fa-stack-2x{font-size:2em}.fa-inverse{color:#fff}.fa-glass:before{content:""}.fa-music:before{content:""}.fa-search:before,.icon-search:before{content:""}.fa-envelope-o:before{content:""}.fa-heart:before{content:""}.fa-star:before{content:""}.fa-star-o:before{content:""}.fa-user:before{content:""}.fa-film:before{content:""}.fa-th-large:before{content:""}.fa-th:before{content:""}.fa-th-list:before{content:""}.fa-check:before{content:""}.fa-close:before,.fa-remove:before,.fa-times:before{content:""}.fa-search-plus:before{content:""}.fa-search-minus:before{content:""}.fa-power-off:before{content:""}.fa-signal:before{content:""}.fa-cog:before,.fa-gear:before{content:""}.fa-trash-o:before{content:""}.fa-home:before,.icon-home:before{content:""}.fa-file-o:before{content:""}.fa-clock-o:before{content:""}.fa-road:before{content:""}.fa-download:before,.rst-content code.download span:first-child:before,.rst-content tt.download span:first-child:before{content:""}.fa-arrow-circle-o-down:before{content:""}.fa-arrow-circle-o-up:before{content:""}.fa-inbox:before{content:""}.fa-play-circle-o:before{content:""}.fa-repeat:before,.fa-rotate-right:before{content:""}.fa-refresh:before{content:""}.fa-list-alt:before{content:""}.fa-lock:before{content:""}.fa-flag:before{content:""}.fa-headphones:before{content:""}.fa-volume-off:before{content:""}.fa-volume-down:before{content:""}.fa-volume-up:before{content:""}.fa-qrcode:before{content:""}.fa-barcode:before{content:""}.fa-tag:before{content:""}.fa-tags:before{content:""}.fa-book:before,.icon-book:before{content:""}.fa-bookmark:before{content:""}.fa-print:before{content:""}.fa-camera:before{content:""}.fa-font:before{content:""}.fa-bold:before{content:""}.fa-italic:before{content:""}.fa-text-height:before{content:""}.fa-text-width:before{content:""}.fa-align-left:before{content:""}.fa-align-center:before{content:""}.fa-align-right:before{content:""}.fa-align-justify:before{content:""}.fa-list:before{content:""}.fa-dedent:before,.fa-outdent:before{content:""}.fa-indent:before{content:""}.fa-video-camera:before{content:""}.fa-image:before,.fa-photo:before,.fa-picture-o:before{content:""}.fa-pencil:before{content:""}.fa-map-marker:before{content:""}.fa-adjust:before{content:""}.fa-tint:before{content:""}.fa-edit:before,.fa-pencil-square-o:before{content:""}.fa-share-square-o:before{content:""}.fa-check-square-o:before{content:""}.fa-arrows:before{content:""}.fa-step-backward:before{content:""}.fa-fast-backward:before{content:""}.fa-backward:before{content:""}.fa-play:before{content:""}.fa-pause:before{content:""}.fa-stop:before{content:""}.fa-forward:before{content:""}.fa-fast-forward:before{content:""}.fa-step-forward:before{content:""}.fa-eject:before{content:""}.fa-chevron-left:before{content:""}.fa-chevron-right:before{content:""}.fa-plus-circle:before{content:""}.fa-minus-circle:before{content:""}.fa-times-circle:before,.wy-inline-validate.wy-inline-validate-danger .wy-input-context:before{content:""}.fa-check-circle:before,.wy-inline-validate.wy-inline-validate-success 
.wy-input-context:before{content:""}.fa-question-circle:before{content:""}.fa-info-circle:before{content:""}.fa-crosshairs:before{content:""}.fa-times-circle-o:before{content:""}.fa-check-circle-o:before{content:""}.fa-ban:before{content:""}.fa-arrow-left:before{content:""}.fa-arrow-right:before{content:""}.fa-arrow-up:before{content:""}.fa-arrow-down:before{content:""}.fa-mail-forward:before,.fa-share:before{content:""}.fa-expand:before{content:""}.fa-compress:before{content:""}.fa-plus:before{content:""}.fa-minus:before{content:""}.fa-asterisk:before{content:""}.fa-exclamation-circle:before,.rst-content .admonition-title:before,.wy-inline-validate.wy-inline-validate-info .wy-input-context:before,.wy-inline-validate.wy-inline-validate-warning .wy-input-context:before{content:""}.fa-gift:before{content:""}.fa-leaf:before{content:""}.fa-fire:before,.icon-fire:before{content:""}.fa-eye:before{content:""}.fa-eye-slash:before{content:""}.fa-exclamation-triangle:before,.fa-warning:before{content:""}.fa-plane:before{content:""}.fa-calendar:before{content:""}.fa-random:before{content:""}.fa-comment:before{content:""}.fa-magnet:before{content:""}.fa-chevron-up:before{content:""}.fa-chevron-down:before{content:""}.fa-retweet:before{content:""}.fa-shopping-cart:before{content:""}.fa-folder:before{content:""}.fa-folder-open:before{content:""}.fa-arrows-v:before{content:""}.fa-arrows-h:before{content:""}.fa-bar-chart-o:before,.fa-bar-chart:before{content:""}.fa-twitter-square:before{content:""}.fa-facebook-square:before{content:""}.fa-camera-retro:before{content:""}.fa-key:before{content:""}.fa-cogs:before,.fa-gears:before{content:""}.fa-comments:before{content:""}.fa-thumbs-o-up:before{content:""}.fa-thumbs-o-down:before{content:""}.fa-star-half:before{content:""}.fa-heart-o:before{content:""}.fa-sign-out:before{content:""}.fa-linkedin-square:before{content:""}.fa-thumb-tack:before{content:""}.fa-external-link:before{content:""}.fa-sign-in:before{content:""}.fa-trophy:before{content:""}.fa-github-square:before{content:""}.fa-upload:before{content:""}.fa-lemon-o:before{content:""}.fa-phone:before{content:""}.fa-square-o:before{content:""}.fa-bookmark-o:before{content:""}.fa-phone-square:before{content:""}.fa-twitter:before{content:""}.fa-facebook-f:before,.fa-facebook:before{content:""}.fa-github:before,.icon-github:before{content:""}.fa-unlock:before{content:""}.fa-credit-card:before{content:""}.fa-feed:before,.fa-rss:before{content:""}.fa-hdd-o:before{content:""}.fa-bullhorn:before{content:""}.fa-bell:before{content:""}.fa-certificate:before{content:""}.fa-hand-o-right:before{content:""}.fa-hand-o-left:before{content:""}.fa-hand-o-up:before{content:""}.fa-hand-o-down:before{content:""}.fa-arrow-circle-left:before,.icon-circle-arrow-left:before{content:""}.fa-arrow-circle-right:before,.icon-circle-arrow-right:before{content:""}.fa-arrow-circle-up:before{content:""}.fa-arrow-circle-down:before{content:""}.fa-globe:before{content:""}.fa-wrench:before{content:""}.fa-tasks:before{content:""}.fa-filter:before{content:""}.fa-briefcase:before{content:""}.fa-arrows-alt:before{content:""}.fa-group:before,.fa-users:before{content:""}.fa-chain:before,.fa-link:before,.icon-link:before{content:""}.fa-cloud:before{content:""}.fa-flask:before{content:""}.fa-cut:before,.fa-scissors:before{content:""}.fa-copy:before,.fa-files-o:before{content:""}.fa-paperclip:before{content:""}.fa-floppy-o:before,.fa-save:before{content:""}.fa
-square:before{content:""}.fa-bars:before,.fa-navicon:before,.fa-reorder:before{content:""}.fa-list-ul:before{content:""}.fa-list-ol:before{content:""}.fa-strikethrough:before{content:""}.fa-underline:before{content:""}.fa-table:before{content:""}.fa-magic:before{content:""}.fa-truck:before{content:""}.fa-pinterest:before{content:""}.fa-pinterest-square:before{content:""}.fa-google-plus-square:before{content:""}.fa-google-plus:before{content:""}.fa-money:before{content:""}.fa-caret-down:before,.icon-caret-down:before,.wy-dropdown .caret:before{content:""}.fa-caret-up:before{content:""}.fa-caret-left:before{content:""}.fa-caret-right:before{content:""}.fa-columns:before{content:""}.fa-sort:before,.fa-unsorted:before{content:""}.fa-sort-desc:before,.fa-sort-down:before{content:""}.fa-sort-asc:before,.fa-sort-up:before{content:""}.fa-envelope:before{content:""}.fa-linkedin:before{content:""}.fa-rotate-left:before,.fa-undo:before{content:""}.fa-gavel:before,.fa-legal:before{content:""}.fa-dashboard:before,.fa-tachometer:before{content:""}.fa-comment-o:before{content:""}.fa-comments-o:before{content:""}.fa-bolt:before,.fa-flash:before{content:""}.fa-sitemap:before{content:""}.fa-umbrella:before{content:""}.fa-clipboard:before,.fa-paste:before{content:""}.fa-lightbulb-o:before{content:""}.fa-exchange:before{content:""}.fa-cloud-download:before{content:""}.fa-cloud-upload:before{content:""}.fa-user-md:before{content:""}.fa-stethoscope:before{content:""}.fa-suitcase:before{content:""}.fa-bell-o:before{content:""}.fa-coffee:before{content:""}.fa-cutlery:before{content:""}.fa-file-text-o:before{content:""}.fa-building-o:before{content:""}.fa-hospital-o:before{content:""}.fa-ambulance:before{content:""}.fa-medkit:before{content:""}.fa-fighter-jet:before{content:""}.fa-beer:before{content:""}.fa-h-square:before{content:""}.fa-plus-square:before{content:""}.fa-angle-double-left:before{content:""}.fa-angle-double-right:before{content:""}.fa-angle-double-up:before{content:""}.fa-angle-double-down:before{content:""}.fa-angle-left:before{content:""}.fa-angle-right:before{content:""}.fa-angle-up:before{content:""}.fa-angle-down:before{content:""}.fa-desktop:before{content:""}.fa-laptop:before{content:""}.fa-tablet:before{content:""}.fa-mobile-phone:before,.fa-mobile:before{content:""}.fa-circle-o:before{content:""}.fa-quote-left:before{content:""}.fa-quote-right:before{content:""}.fa-spinner:before{content:""}.fa-circle:before{content:""}.fa-mail-reply:before,.fa-reply:before{content:""}.fa-github-alt:before{content:""}.fa-folder-o:before{content:""}.fa-folder-open-o:before{content:""}.fa-smile-o:before{content:""}.fa-frown-o:before{content:""}.fa-meh-o:before{content:""}.fa-gamepad:before{content:""}.fa-keyboard-o:before{content:""}.fa-flag-o:before{content:""}.fa-flag-checkered:before{content:""}.fa-terminal:before{content:""}.fa-code:before{content:""}.fa-mail-reply-all:before,.fa-reply-all:before{content:""}.fa-star-half-empty:before,.fa-star-half-full:before,.fa-star-half-o:before{content:""}.fa-location-arrow:before{content:""}.fa-crop:before{content:""}.fa-code-fork:before{content:""}.fa-chain-broken:before,.fa-unlink:before{content:""}.fa-question:before{content:""}.fa-info:before{content:""}.fa-exclamation:before{content:""}.fa-superscript:before{content:""}.fa-subscript:before{content:""}.fa-eraser:before{content:""}.fa-puzzle-piece:before{content:""}.fa-microphone:before{content:""}.fa-microphone-slash:
before{content:""}.fa-shield:before{content:""}.fa-calendar-o:before{content:""}.fa-fire-extinguisher:before{content:""}.fa-rocket:before{content:""}.fa-maxcdn:before{content:""}.fa-chevron-circle-left:before{content:""}.fa-chevron-circle-right:before{content:""}.fa-chevron-circle-up:before{content:""}.fa-chevron-circle-down:before{content:""}.fa-html5:before{content:""}.fa-css3:before{content:""}.fa-anchor:before{content:""}.fa-unlock-alt:before{content:""}.fa-bullseye:before{content:""}.fa-ellipsis-h:before{content:""}.fa-ellipsis-v:before{content:""}.fa-rss-square:before{content:""}.fa-play-circle:before{content:""}.fa-ticket:before{content:""}.fa-minus-square:before{content:""}.fa-minus-square-o:before,.wy-menu-vertical li.current>a span.toctree-expand:before,.wy-menu-vertical li.on a span.toctree-expand:before{content:""}.fa-level-up:before{content:""}.fa-level-down:before{content:""}.fa-check-square:before{content:""}.fa-pencil-square:before{content:""}.fa-external-link-square:before{content:""}.fa-share-square:before{content:""}.fa-compass:before{content:""}.fa-caret-square-o-down:before,.fa-toggle-down:before{content:""}.fa-caret-square-o-up:before,.fa-toggle-up:before{content:""}.fa-caret-square-o-right:before,.fa-toggle-right:before{content:""}.fa-eur:before,.fa-euro:before{content:""}.fa-gbp:before{content:""}.fa-dollar:before,.fa-usd:before{content:""}.fa-inr:before,.fa-rupee:before{content:""}.fa-cny:before,.fa-jpy:before,.fa-rmb:before,.fa-yen:before{content:""}.fa-rouble:before,.fa-rub:before,.fa-ruble:before{content:""}.fa-krw:before,.fa-won:before{content:""}.fa-bitcoin:before,.fa-btc:before{content:""}.fa-file:before{content:""}.fa-file-text:before{content:""}.fa-sort-alpha-asc:before{content:""}.fa-sort-alpha-desc:before{content:""}.fa-sort-amount-asc:before{content:""}.fa-sort-amount-desc:before{content:""}.fa-sort-numeric-asc:before{content:""}.fa-sort-numeric-desc:before{content:""}.fa-thumbs-up:before{content:""}.fa-thumbs-down:before{content:""}.fa-youtube-square:before{content:""}.fa-youtube:before{content:""}.fa-xing:before{content:""}.fa-xing-square:before{content:""}.fa-youtube-play:before{content:""}.fa-dropbox:before{content:""}.fa-stack-overflow:before{content:""}.fa-instagram:before{content:""}.fa-flickr:before{content:""}.fa-adn:before{content:""}.fa-bitbucket:before,.icon-bitbucket:before{content:""}.fa-bitbucket-square:before{content:""}.fa-tumblr:before{content:""}.fa-tumblr-square:before{content:""}.fa-long-arrow-down:before{content:""}.fa-long-arrow-up:before{content:""}.fa-long-arrow-left:before{content:""}.fa-long-arrow-right:before{content:""}.fa-apple:before{content:""}.fa-windows:before{content:""}.fa-android:before{content:""}.fa-linux:before{content:""}.fa-dribbble:before{content:""}.fa-skype:before{content:""}.fa-foursquare:before{content:""}.fa-trello:before{content:""}.fa-female:before{content:""}.fa-male:before{content:""}.fa-gittip:before,.fa-gratipay:before{content:""}.fa-sun-o:before{content:""}.fa-moon-o:before{content:""}.fa-archive:before{content:""}.fa-bug:before{content:""}.fa-vk:before{content:""}.fa-weibo:before{content:""}.fa-renren:before{content:""}.fa-pagelines:before{content:""}.fa-stack-exchange:before{content:""}.fa-arrow-circle-o-right:before{content:""}.fa-arrow-circle-o-left:before{content:""}.fa-caret-square-o-left:before,.fa-toggle-left:before{content:""}.fa-dot-circle-o:before{content:""}.fa-wheelchair:before{content:""}.fa-vime
o-square:before{content:""}.fa-try:before,.fa-turkish-lira:before{content:""}.fa-plus-square-o:before,.wy-menu-vertical li span.toctree-expand:before{content:""}.fa-space-shuttle:before{content:""}.fa-slack:before{content:""}.fa-envelope-square:before{content:""}.fa-wordpress:before{content:""}.fa-openid:before{content:""}.fa-bank:before,.fa-institution:before,.fa-university:before{content:""}.fa-graduation-cap:before,.fa-mortar-board:before{content:""}.fa-yahoo:before{content:""}.fa-google:before{content:""}.fa-reddit:before{content:""}.fa-reddit-square:before{content:""}.fa-stumbleupon-circle:before{content:""}.fa-stumbleupon:before{content:""}.fa-delicious:before{content:""}.fa-digg:before{content:""}.fa-pied-piper-pp:before{content:""}.fa-pied-piper-alt:before{content:""}.fa-drupal:before{content:""}.fa-joomla:before{content:""}.fa-language:before{content:""}.fa-fax:before{content:""}.fa-building:before{content:""}.fa-child:before{content:""}.fa-paw:before{content:""}.fa-spoon:before{content:""}.fa-cube:before{content:""}.fa-cubes:before{content:""}.fa-behance:before{content:""}.fa-behance-square:before{content:""}.fa-steam:before{content:""}.fa-steam-square:before{content:""}.fa-recycle:before{content:""}.fa-automobile:before,.fa-car:before{content:""}.fa-cab:before,.fa-taxi:before{content:""}.fa-tree:before{content:""}.fa-spotify:before{content:""}.fa-deviantart:before{content:""}.fa-soundcloud:before{content:""}.fa-database:before{content:""}.fa-file-pdf-o:before{content:""}.fa-file-word-o:before{content:""}.fa-file-excel-o:before{content:""}.fa-file-powerpoint-o:before{content:""}.fa-file-image-o:before,.fa-file-photo-o:before,.fa-file-picture-o:before{content:""}.fa-file-archive-o:before,.fa-file-zip-o:before{content:""}.fa-file-audio-o:before,.fa-file-sound-o:before{content:""}.fa-file-movie-o:before,.fa-file-video-o:before{content:""}.fa-file-code-o:before{content:""}.fa-vine:before{content:""}.fa-codepen:before{content:""}.fa-jsfiddle:before{content:""}.fa-life-bouy:before,.fa-life-buoy:before,.fa-life-ring:before,.fa-life-saver:before,.fa-support:before{content:""}.fa-circle-o-notch:before{content:""}.fa-ra:before,.fa-rebel:before,.fa-resistance:before{content:""}.fa-empire:before,.fa-ge:before{content:""}.fa-git-square:before{content:""}.fa-git:before{content:""}.fa-hacker-news:before,.fa-y-combinator-square:before,.fa-yc-square:before{content:""}.fa-tencent-weibo:before{content:""}.fa-qq:before{content:""}.fa-wechat:before,.fa-weixin:before{content:""}.fa-paper-plane:before,.fa-send:before{content:""}.fa-paper-plane-o:before,.fa-send-o:before{content:""}.fa-history:before{content:""}.fa-circle-thin:before{content:""}.fa-header:before{content:""}.fa-paragraph:before{content:""}.fa-sliders:before{content:""}.fa-share-alt:before{content:""}.fa-share-alt-square:before{content:""}.fa-bomb:before{content:""}.fa-futbol-o:before,.fa-soccer-ball-o:before{content:""}.fa-tty:before{content:""}.fa-binoculars:before{content:""}.fa-plug:before{content:""}.fa-slideshare:before{content:""}.fa-twitch:before{content:""}.fa-yelp:before{content:""}.fa-newspaper-o:before{content:""}.fa-wifi:before{content:""}.fa-calculator:before{content:""}.fa-paypal:before{content:""}.fa-google-wallet:before{content:""}.fa-cc-visa:before{content:""}.fa-cc-mastercard:before{content:""}.fa-cc-discover:before{content:""}.fa-cc-amex:before{content:""}.fa-cc-paypal:before{content:""}.fa-cc-stripe:before{content:""}.fa-bell-sl
ash:before{content:""}.fa-bell-slash-o:before{content:""}.fa-trash:before{content:""}.fa-copyright:before{content:""}.fa-at:before{content:""}.fa-eyedropper:before{content:""}.fa-paint-brush:before{content:""}.fa-birthday-cake:before{content:""}.fa-area-chart:before{content:""}.fa-pie-chart:before{content:""}.fa-line-chart:before{content:""}.fa-lastfm:before{content:""}.fa-lastfm-square:before{content:""}.fa-toggle-off:before{content:""}.fa-toggle-on:before{content:""}.fa-bicycle:before{content:""}.fa-bus:before{content:""}.fa-ioxhost:before{content:""}.fa-angellist:before{content:""}.fa-cc:before{content:""}.fa-ils:before,.fa-shekel:before,.fa-sheqel:before{content:""}.fa-meanpath:before{content:""}.fa-buysellads:before{content:""}.fa-connectdevelop:before{content:""}.fa-dashcube:before{content:""}.fa-forumbee:before{content:""}.fa-leanpub:before{content:""}.fa-sellsy:before{content:""}.fa-shirtsinbulk:before{content:""}.fa-simplybuilt:before{content:""}.fa-skyatlas:before{content:""}.fa-cart-plus:before{content:""}.fa-cart-arrow-down:before{content:""}.fa-diamond:before{content:""}.fa-ship:before{content:""}.fa-user-secret:before{content:""}.fa-motorcycle:before{content:""}.fa-street-view:before{content:""}.fa-heartbeat:before{content:""}.fa-venus:before{content:""}.fa-mars:before{content:""}.fa-mercury:before{content:""}.fa-intersex:before,.fa-transgender:before{content:""}.fa-transgender-alt:before{content:""}.fa-venus-double:before{content:""}.fa-mars-double:before{content:""}.fa-venus-mars:before{content:""}.fa-mars-stroke:before{content:""}.fa-mars-stroke-v:before{content:""}.fa-mars-stroke-h:before{content:""}.fa-neuter:before{content:""}.fa-genderless:before{content:""}.fa-facebook-official:before{content:""}.fa-pinterest-p:before{content:""}.fa-whatsapp:before{content:""}.fa-server:before{content:""}.fa-user-plus:before{content:""}.fa-user-times:before{content:""}.fa-bed:before,.fa-hotel:before{content:""}.fa-viacoin:before{content:""}.fa-train:before{content:""}.fa-subway:before{content:""}.fa-medium:before{content:""}.fa-y-combinator:before,.fa-yc:before{content:""}.fa-optin-monster:before{content:""}.fa-opencart:before{content:""}.fa-expeditedssl:before{content:""}.fa-battery-4:before,.fa-battery-full:before,.fa-battery:before{content:""}.fa-battery-3:before,.fa-battery-three-quarters:before{content:""}.fa-battery-2:before,.fa-battery-half:before{content:""}.fa-battery-1:before,.fa-battery-quarter:before{content:""}.fa-battery-0:before,.fa-battery-empty:before{content:""}.fa-mouse-pointer:before{content:""}.fa-i-cursor:before{content:""}.fa-object-group:before{content:""}.fa-object-ungroup:before{content:""}.fa-sticky-note:before{content:""}.fa-sticky-note-o:before{content:""}.fa-cc-jcb:before{content:""}.fa-cc-diners-club:before{content:""}.fa-clone:before{content:""}.fa-balance-scale:before{content:""}.fa-hourglass-o:before{content:""}.fa-hourglass-1:before,.fa-hourglass-start:before{content:""}.fa-hourglass-2:before,.fa-hourglass-half:before{content:""}.fa-hourglass-3:before,.fa-hourglass-end:before{content:""}.fa-hourglass:before{content:""}.fa-hand-grab-o:before,.fa-hand-rock-o:before{content:""}.fa-hand-paper-o:before,.fa-hand-stop-o:before{content:""}.fa-hand-scissors-o:before{content:""}.fa-hand-lizard-o:before{content:""}.fa-hand-spock-o:before{content:""}.fa-hand-pointer-o:before{content:""}.fa-hand-peace-o:before{content:""}.fa-trademark:before{content:""}.fa-registered:bef
ore{content:""}.fa-creative-commons:before{content:""}.fa-gg:before{content:""}.fa-gg-circle:before{content:""}.fa-tripadvisor:before{content:""}.fa-odnoklassniki:before{content:""}.fa-odnoklassniki-square:before{content:""}.fa-get-pocket:before{content:""}.fa-wikipedia-w:before{content:""}.fa-safari:before{content:""}.fa-chrome:before{content:""}.fa-firefox:before{content:""}.fa-opera:before{content:""}.fa-internet-explorer:before{content:""}.fa-television:before,.fa-tv:before{content:""}.fa-contao:before{content:""}.fa-500px:before{content:""}.fa-amazon:before{content:""}.fa-calendar-plus-o:before{content:""}.fa-calendar-minus-o:before{content:""}.fa-calendar-times-o:before{content:""}.fa-calendar-check-o:before{content:""}.fa-industry:before{content:""}.fa-map-pin:before{content:""}.fa-map-signs:before{content:""}.fa-map-o:before{content:""}.fa-map:before{content:""}.fa-commenting:before{content:""}.fa-commenting-o:before{content:""}.fa-houzz:before{content:""}.fa-vimeo:before{content:""}.fa-black-tie:before{content:""}.fa-fonticons:before{content:""}.fa-reddit-alien:before{content:""}.fa-edge:before{content:""}.fa-credit-card-alt:before{content:""}.fa-codiepie:before{content:""}.fa-modx:before{content:""}.fa-fort-awesome:before{content:""}.fa-usb:before{content:""}.fa-product-hunt:before{content:""}.fa-mixcloud:before{content:""}.fa-scribd:before{content:""}.fa-pause-circle:before{content:""}.fa-pause-circle-o:before{content:""}.fa-stop-circle:before{content:""}.fa-stop-circle-o:before{content:""}.fa-shopping-bag:before{content:""}.fa-shopping-basket:before{content:""}.fa-hashtag:before{content:""}.fa-bluetooth:before{content:""}.fa-bluetooth-b:before{content:""}.fa-percent:before{content:""}.fa-gitlab:before,.icon-gitlab:before{content:""}.fa-wpbeginner:before{content:""}.fa-wpforms:before{content:""}.fa-envira:before{content:""}.fa-universal-access:before{content:""}.fa-wheelchair-alt:before{content:""}.fa-question-circle-o:before{content:""}.fa-blind:before{content:""}.fa-audio-description:before{content:""}.fa-volume-control-phone:before{content:""}.fa-braille:before{content:""}.fa-assistive-listening-systems:before{content:""}.fa-american-sign-language-interpreting:before,.fa-asl-interpreting:before{content:""}.fa-deaf:before,.fa-deafness:before,.fa-hard-of-hearing:before{content:""}.fa-glide:before{content:""}.fa-glide-g:before{content:""}.fa-sign-language:before,.fa-signing:before{content:""}.fa-low-vision:before{content:""}.fa-viadeo:before{content:""}.fa-viadeo-square:before{content:""}.fa-snapchat:before{content:""}.fa-snapchat-ghost:before{content:""}.fa-snapchat-square:before{content:""}.fa-pied-piper:before{content:""}.fa-first-order:before{content:""}.fa-yoast:before{content:""}.fa-themeisle:before{content:""}.fa-google-plus-circle:before,.fa-google-plus-official:before{content:""}.fa-fa:before,.fa-font-awesome:before{content:""}.fa-handshake-o:before{content:""}.fa-envelope-open:before{content:""}.fa-envelope-open-o:before{content:""}.fa-linode:before{content:""}.fa-address-book:before{content:""}.fa-address-book-o:before{content:""}.fa-address-card:before,.fa-vcard:before{content:""}.fa-address-card-o:before,.fa-vcard-o:before{content:""}.fa-user-circle:before{content:""}.fa-user-circle-o:before{content:""}.fa-user-o:before{content:""}.fa-id-badge:before{content:""}.fa-drivers-license:before,.fa-id-card:before{content:""}.fa-drivers-license-o:before,.fa-id-card-o:before{content
:""}.fa-quora:before{content:""}.fa-free-code-camp:before{content:""}.fa-telegram:before{content:""}.fa-thermometer-4:before,.fa-thermometer-full:before,.fa-thermometer:before{content:""}.fa-thermometer-3:before,.fa-thermometer-three-quarters:before{content:""}.fa-thermometer-2:before,.fa-thermometer-half:before{content:""}.fa-thermometer-1:before,.fa-thermometer-quarter:before{content:""}.fa-thermometer-0:before,.fa-thermometer-empty:before{content:""}.fa-shower:before{content:""}.fa-bath:before,.fa-bathtub:before,.fa-s15:before{content:""}.fa-podcast:before{content:""}.fa-window-maximize:before{content:""}.fa-window-minimize:before{content:""}.fa-window-restore:before{content:""}.fa-times-rectangle:before,.fa-window-close:before{content:""}.fa-times-rectangle-o:before,.fa-window-close-o:before{content:""}.fa-bandcamp:before{content:""}.fa-grav:before{content:""}.fa-etsy:before{content:""}.fa-imdb:before{content:""}.fa-ravelry:before{content:""}.fa-eercast:before{content:""}.fa-microchip:before{content:""}.fa-snowflake-o:before{content:""}.fa-superpowers:before{content:""}.fa-wpexplorer:before{content:""}.fa-meetup:before{content:""}.sr-only{position:absolute;width:1px;height:1px;padding:0;margin:-1px;overflow:hidden;clip:rect(0,0,0,0);border:0}.sr-only-focusable:active,.sr-only-focusable:focus{position:static;width:auto;height:auto;margin:0;overflow:visible;clip:auto}.fa,.icon,.rst-content .admonition-title,.rst-content .code-block-caption .headerlink,.rst-content code.download span:first-child,.rst-content dl dt .headerlink,.rst-content h1 .headerlink,.rst-content h2 .headerlink,.rst-content h3 .headerlink,.rst-content h4 .headerlink,.rst-content h5 .headerlink,.rst-content h6 .headerlink,.rst-content p.caption .headerlink,.rst-content table>caption .headerlink,.rst-content tt.download span:first-child,.wy-dropdown .caret,.wy-inline-validate.wy-inline-validate-danger .wy-input-context,.wy-inline-validate.wy-inline-validate-info .wy-input-context,.wy-inline-validate.wy-inline-validate-success .wy-input-context,.wy-inline-validate.wy-inline-validate-warning .wy-input-context,.wy-menu-vertical li.current>a span.toctree-expand,.wy-menu-vertical li.on a span.toctree-expand,.wy-menu-vertical li span.toctree-expand{font-family:inherit}.fa:before,.icon:before,.rst-content .admonition-title:before,.rst-content .code-block-caption .headerlink:before,.rst-content code.download span:first-child:before,.rst-content dl dt .headerlink:before,.rst-content h1 .headerlink:before,.rst-content h2 .headerlink:before,.rst-content h3 .headerlink:before,.rst-content h4 .headerlink:before,.rst-content h5 .headerlink:before,.rst-content h6 .headerlink:before,.rst-content p.caption .headerlink:before,.rst-content table>caption .headerlink:before,.rst-content tt.download span:first-child:before,.wy-dropdown .caret:before,.wy-inline-validate.wy-inline-validate-danger .wy-input-context:before,.wy-inline-validate.wy-inline-validate-info .wy-input-context:before,.wy-inline-validate.wy-inline-validate-success .wy-input-context:before,.wy-inline-validate.wy-inline-validate-warning .wy-input-context:before,.wy-menu-vertical li.current>a span.toctree-expand:before,.wy-menu-vertical li.on a span.toctree-expand:before,.wy-menu-vertical li span.toctree-expand:before{font-family:FontAwesome;display:inline-block;font-style:normal;font-weight:400;line-height:1;text-decoration:inherit}.rst-content .code-block-caption a .headerlink,.rst-content a .admonition-title,.rst-content code.download a 
span:first-child,.rst-content dl dt a .headerlink,.rst-content h1 a .headerlink,.rst-content h2 a .headerlink,.rst-content h3 a .headerlink,.rst-content h4 a .headerlink,.rst-content h5 a .headerlink,.rst-content h6 a .headerlink,.rst-content p.caption a .headerlink,.rst-content table>caption a .headerlink,.rst-content tt.download a span:first-child,.wy-menu-vertical li.current>a span.toctree-expand,.wy-menu-vertical li.on a span.toctree-expand,.wy-menu-vertical li a span.toctree-expand,a .fa,a .icon,a .rst-content .admonition-title,a .rst-content .code-block-caption .headerlink,a .rst-content code.download span:first-child,a .rst-content dl dt .headerlink,a .rst-content h1 .headerlink,a .rst-content h2 .headerlink,a .rst-content h3 .headerlink,a .rst-content h4 .headerlink,a .rst-content h5 .headerlink,a .rst-content h6 .headerlink,a .rst-content p.caption .headerlink,a .rst-content table>caption .headerlink,a .rst-content tt.download span:first-child,a .wy-menu-vertical li span.toctree-expand{display:inline-block;text-decoration:inherit}.btn .fa,.btn .icon,.btn .rst-content .admonition-title,.btn .rst-content .code-block-caption .headerlink,.btn .rst-content code.download span:first-child,.btn .rst-content dl dt .headerlink,.btn .rst-content h1 .headerlink,.btn .rst-content h2 .headerlink,.btn .rst-content h3 .headerlink,.btn .rst-content h4 .headerlink,.btn .rst-content h5 .headerlink,.btn .rst-content h6 .headerlink,.btn .rst-content p.caption .headerlink,.btn .rst-content table>caption .headerlink,.btn .rst-content tt.download span:first-child,.btn .wy-menu-vertical li.current>a span.toctree-expand,.btn .wy-menu-vertical li.on a span.toctree-expand,.btn .wy-menu-vertical li span.toctree-expand,.nav .fa,.nav .icon,.nav .rst-content .admonition-title,.nav .rst-content .code-block-caption .headerlink,.nav .rst-content code.download span:first-child,.nav .rst-content dl dt .headerlink,.nav .rst-content h1 .headerlink,.nav .rst-content h2 .headerlink,.nav .rst-content h3 .headerlink,.nav .rst-content h4 .headerlink,.nav .rst-content h5 .headerlink,.nav .rst-content h6 .headerlink,.nav .rst-content p.caption .headerlink,.nav .rst-content table>caption .headerlink,.nav .rst-content tt.download span:first-child,.nav .wy-menu-vertical li.current>a span.toctree-expand,.nav .wy-menu-vertical li.on a span.toctree-expand,.nav .wy-menu-vertical li span.toctree-expand,.rst-content .btn .admonition-title,.rst-content .code-block-caption .btn .headerlink,.rst-content .code-block-caption .nav .headerlink,.rst-content .nav .admonition-title,.rst-content code.download .btn span:first-child,.rst-content code.download .nav span:first-child,.rst-content dl dt .btn .headerlink,.rst-content dl dt .nav .headerlink,.rst-content h1 .btn .headerlink,.rst-content h1 .nav .headerlink,.rst-content h2 .btn .headerlink,.rst-content h2 .nav .headerlink,.rst-content h3 .btn .headerlink,.rst-content h3 .nav .headerlink,.rst-content h4 .btn .headerlink,.rst-content h4 .nav .headerlink,.rst-content h5 .btn .headerlink,.rst-content h5 .nav .headerlink,.rst-content h6 .btn .headerlink,.rst-content h6 .nav .headerlink,.rst-content p.caption .btn .headerlink,.rst-content p.caption .nav .headerlink,.rst-content table>caption .btn .headerlink,.rst-content table>caption .nav .headerlink,.rst-content tt.download .btn span:first-child,.rst-content tt.download .nav span:first-child,.wy-menu-vertical li .btn span.toctree-expand,.wy-menu-vertical li.current>a .btn span.toctree-expand,.wy-menu-vertical li.current>a .nav 
kbd{font-size:.9rem}.rst-content table.docutils.footnote,html.writer-html4 .rst-content table.docutils.citation,html.writer-html5 .rst-content dl.footnote{color:grey}.rst-content table.docutils.footnote code,.rst-content table.docutils.footnote tt,html.writer-html4 .rst-content table.docutils.citation code,html.writer-html4 .rst-content table.docutils.citation tt,html.writer-html5 .rst-content dl.footnote code,html.writer-html5 .rst-content dl.footnote tt{color:#555}.rst-content .wy-table-responsive.citation,.rst-content .wy-table-responsive.footnote{margin-bottom:0}.rst-content .wy-table-responsive.citation+:not(.citation),.rst-content .wy-table-responsive.footnote+:not(.footnote){margin-top:24px}.rst-content .wy-table-responsive.citation:last-child,.rst-content .wy-table-responsive.footnote:last-child{margin-bottom:24px}.rst-content table.docutils th{border-color:#e1e4e5}html.writer-html5 .rst-content table.docutils th{border:1px solid #e1e4e5}html.writer-html5 .rst-content table.docutils td>p,html.writer-html5 .rst-content table.docutils th>p{line-height:1rem;margin-bottom:0;font-size:.9rem}.rst-content table.docutils td .last,.rst-content table.docutils td .last>:last-child{margin-bottom:0}.rst-content table.field-list,.rst-content table.field-list td{border:none}.rst-content table.field-list td p{font-size:inherit;line-height:inherit}.rst-content table.field-list td>strong{display:inline-block}.rst-content table.field-list .field-name{padding-right:10px;text-align:left;white-space:nowrap}.rst-content table.field-list .field-body{text-align:left}.rst-content code,.rst-content tt{color:#000;font-family:SFMono-Regular,Menlo,Monaco,Consolas,Liberation Mono,Courier New,Courier,monospace;padding:2px 5px}.rst-content code big,.rst-content code em,.rst-content tt big,.rst-content tt em{font-size:100%!important;line-height:normal}.rst-content code.literal,.rst-content tt.literal{color:#e74c3c}.rst-content code.xref,.rst-content tt.xref,a .rst-content code,a .rst-content tt{font-weight:700;color:#404040}.rst-content kbd,.rst-content pre,.rst-content samp{font-family:SFMono-Regular,Menlo,Monaco,Consolas,Liberation Mono,Courier New,Courier,monospace}.rst-content a code,.rst-content a tt{color:#2980b9}.rst-content dl{margin-bottom:24px}.rst-content dl dt{font-weight:700;margin-bottom:12px}.rst-content dl ol,.rst-content dl p,.rst-content dl table,.rst-content dl ul{margin-bottom:12px}.rst-content dl dd{margin:0 0 12px 24px;line-height:24px}html.writer-html4 .rst-content dl:not(.docutils),html.writer-html5 .rst-content dl[class]:not(.option-list):not(.field-list):not(.footnote):not(.glossary):not(.simple){margin-bottom:24px}html.writer-html4 .rst-content dl:not(.docutils)>dt,html.writer-html5 .rst-content dl[class]:not(.option-list):not(.field-list):not(.footnote):not(.glossary):not(.simple)>dt{display:table;margin:6px 0;font-size:90%;line-height:normal;background:#e7f2fa;color:#2980b9;border-top:3px solid #6ab0de;padding:6px;position:relative}html.writer-html4 .rst-content dl:not(.docutils)>dt:before,html.writer-html5 .rst-content dl[class]:not(.option-list):not(.field-list):not(.footnote):not(.glossary):not(.simple)>dt:before{color:#6ab0de}html.writer-html4 .rst-content dl:not(.docutils)>dt .headerlink,html.writer-html5 .rst-content dl[class]:not(.option-list):not(.field-list):not(.footnote):not(.glossary):not(.simple)>dt .headerlink{color:#404040;font-size:100%!important}html.writer-html4 .rst-content dl:not(.docutils) dl:not(.field-list)>dt,html.writer-html5 .rst-content 
dl[class]:not(.option-list):not(.field-list):not(.footnote):not(.glossary):not(.simple) dl:not(.field-list)>dt{margin-bottom:6px;border:none;border-left:3px solid #ccc;background:#f0f0f0;color:#555}html.writer-html4 .rst-content dl:not(.docutils) dl:not(.field-list)>dt .headerlink,html.writer-html5 .rst-content dl[class]:not(.option-list):not(.field-list):not(.footnote):not(.glossary):not(.simple) dl:not(.field-list)>dt .headerlink{color:#404040;font-size:100%!important}html.writer-html4 .rst-content dl:not(.docutils)>dt:first-child,html.writer-html5 .rst-content dl[class]:not(.option-list):not(.field-list):not(.footnote):not(.glossary):not(.simple)>dt:first-child{margin-top:0}html.writer-html4 .rst-content dl:not(.docutils) code,html.writer-html4 .rst-content dl:not(.docutils) tt,html.writer-html5 .rst-content dl[class]:not(.option-list):not(.field-list):not(.footnote):not(.glossary):not(.simple) code,html.writer-html5 .rst-content dl[class]:not(.option-list):not(.field-list):not(.footnote):not(.glossary):not(.simple) tt{font-weight:700}html.writer-html4 .rst-content dl:not(.docutils) code.descclassname,html.writer-html4 .rst-content dl:not(.docutils) code.descname,html.writer-html4 .rst-content dl:not(.docutils) tt.descclassname,html.writer-html4 .rst-content dl:not(.docutils) tt.descname,html.writer-html5 .rst-content dl[class]:not(.option-list):not(.field-list):not(.footnote):not(.glossary):not(.simple) code.descclassname,html.writer-html5 .rst-content dl[class]:not(.option-list):not(.field-list):not(.footnote):not(.glossary):not(.simple) code.descname,html.writer-html5 .rst-content dl[class]:not(.option-list):not(.field-list):not(.footnote):not(.glossary):not(.simple) tt.descclassname,html.writer-html5 .rst-content dl[class]:not(.option-list):not(.field-list):not(.footnote):not(.glossary):not(.simple) tt.descname{background-color:transparent;border:none;padding:0;font-size:100%!important}html.writer-html4 .rst-content dl:not(.docutils) code.descname,html.writer-html4 .rst-content dl:not(.docutils) tt.descname,html.writer-html5 .rst-content dl[class]:not(.option-list):not(.field-list):not(.footnote):not(.glossary):not(.simple) code.descname,html.writer-html5 .rst-content dl[class]:not(.option-list):not(.field-list):not(.footnote):not(.glossary):not(.simple) tt.descname{font-weight:700}html.writer-html4 .rst-content dl:not(.docutils) .optional,html.writer-html5 .rst-content dl[class]:not(.option-list):not(.field-list):not(.footnote):not(.glossary):not(.simple) .optional{display:inline-block;padding:0 4px;color:#000;font-weight:700}html.writer-html4 .rst-content dl:not(.docutils) .property,html.writer-html5 .rst-content dl[class]:not(.option-list):not(.field-list):not(.footnote):not(.glossary):not(.simple) .property{display:inline-block;padding-right:8px}.rst-content .viewcode-back,.rst-content .viewcode-link{display:inline-block;color:#27ae60;font-size:80%;padding-left:24px}.rst-content .viewcode-back{display:block;float:right}.rst-content p.rubric{margin-bottom:12px;font-weight:700}.rst-content code.download,.rst-content tt.download{background:inherit;padding:inherit;font-weight:400;font-family:inherit;font-size:inherit;color:inherit;border:inherit;white-space:inherit}.rst-content code.download span:first-child,.rst-content tt.download span:first-child{-webkit-font-smoothing:subpixel-antialiased}.rst-content code.download span:first-child:before,.rst-content tt.download span:first-child:before{margin-right:4px}.rst-content .guilabel{border:1px solid 
#7fbbe3;background:#e7f2fa;font-size:80%;font-weight:700;border-radius:4px;padding:2.4px 6px;margin:auto 2px}.rst-content .versionmodified{font-style:italic}@media screen and (max-width:480px){.rst-content .sidebar{width:100%}}span[id*=MathJax-Span]{color:#404040}.math{text-align:center}@font-face{font-family:Lato;src:url(fonts/lato-normal.woff2?bd03a2cc277bbbc338d464e679fe9942) format("woff2"),url(fonts/lato-normal.woff?27bd77b9162d388cb8d4c4217c7c5e2a) format("woff");font-weight:400;font-style:normal;font-display:block}@font-face{font-family:Lato;src:url(fonts/lato-bold.woff2?cccb897485813c7c256901dbca54ecf2) format("woff2"),url(fonts/lato-bold.woff?d878b6c29b10beca227e9eef4246111b) format("woff");font-weight:700;font-style:normal;font-display:block}@font-face{font-family:Lato;src:url(fonts/lato-bold-italic.woff2?0b6bb6725576b072c5d0b02ecdd1900d) format("woff2"),url(fonts/lato-bold-italic.woff?9c7e4e9eb485b4a121c760e61bc3707c) format("woff");font-weight:700;font-style:italic;font-display:block}@font-face{font-family:Lato;src:url(fonts/lato-normal-italic.woff2?4eb103b4d12be57cb1d040ed5e162e9d) format("woff2"),url(fonts/lato-normal-italic.woff?f28f2d6482446544ef1ea1ccc6dd5892) format("woff");font-weight:400;font-style:italic;font-display:block}@font-face{font-family:Roboto Slab;font-style:normal;font-weight:400;src:url(fonts/Roboto-Slab-Regular.woff2?7abf5b8d04d26a2cafea937019bca958) format("woff2"),url(fonts/Roboto-Slab-Regular.woff?c1be9284088d487c5e3ff0a10a92e58c) format("woff");font-display:block}@font-face{font-family:Roboto Slab;font-style:normal;font-weight:700;src:url(fonts/Roboto-Slab-Bold.woff2?9984f4a9bda09be08e83f2506954adbe) format("woff2"),url(fonts/Roboto-Slab-Bold.woff?bed5564a116b05148e3b3bea6fb1162a) format("woff");font-display:block} \ No newline at end of file diff --git a/docs/_static/custom.css b/docs/_static/custom.css deleted file mode 100644 index d59f0ee7c..000000000 --- a/docs/_static/custom.css +++ /dev/null @@ -1,204 +0,0 @@ -/* override css for readable.css */ - -/* styles/fonts to match http://mdanalysis.org (see public/css) */ -/* MindsDB --shamrock: #00b06d; */ -/* MindsDB --dark: #2c263f; */ -/* MindsDB --aqua-marine: #4dd9ca; */ -/* MindsDB --wheat: #fedc8c; */ -/* MindsDB --watermelon: #f25c63; */ -/* MindsDB --blueberry: #6751ad; */ -/* MindsDB --white: #ffffff; */ -/* MindsDB --slate-grey: #5d6970; */ - -body { - font-family: 'PT Sans', Helvetica, Arial, 'sans-serif'; - font-size: 17px; -} - -div.body { - color: #000000; -} - -div.sphinxsidebar a:hover { - text-decoration: none !important; -} - -div.sphinxsidebar p { - color: #2c263f; -} - -/* Home MDAnalysis colour */ -.wy-side-nav-search > a { - color: #343131; -} - -/* Side MDAnalysis version colour */ -.wy-side-nav-search > div.version { - color: #2c263f; -} - -/* Menubar caption colour */ -div.wy-menu-vertical span.caption-text { - color: #00b06d; -} - -/* Mobile layout menubar option */ -nav.wy-nav-top { - background: #343131; -} - -/* Menu search bar outline (default blue) */ -.wy-side-nav-search input[type="text"] { - border-color: #2c263f; -} - - -/* -- body styles --------------------------------------------------------- */ - -/* Different coloured links for sidebar vs body) */ -div.rst-content a { - color: #00b06d; - text-decoration: none; -} - -div.rst-content a:visited { - color: #00b06d; -} - -a:hover { - color: #00b06d !important; - text-decoration: underline; -} - - -pre, tt, code { - font-family: Menlo, Monaco, 'Courier New', monospace -} - - -div.body h1 { - font-weight: bolder; -} - 
-a.headerlink { - color: #2c263f; - font-size: 0.8em; - padding: 0 4px 0 4px; - text-decoration: none; -} - -a.headerlink:hover { - background-color: #2c263f; - color: #fff; -} - -/* ------- admonition boxes ------- */ - -div.admonition { - margin: 10px 0px; - padding: 10px 10px; -} - -div.admonition p.admonition-title { - font-size: 100%; - font-weight: bolder; -} - -/* ----- Tables ----- */ - -/* override table width restrictions */ -/* wrap tables instead of scrolling */ -@media screen and (min-width: 767px) { - - .wy-table-responsive table td, .wy-table-responsive table th { - /* !important prevents the common CSS stylesheets from overriding - this as on RTD they are loaded after this stylesheet */ - white-space: normal !important; - } - - .wy-table-responsive { - overflow: visible !important; - max-width: 100% !important; - } - } - -/* ----- Field lists ------ */ - -.section > dl.field-list { - display: flex; - flex-wrap: wrap; - margin: 0; - padding: 0; -} - -dl.field-list > dt::after { - content: ":"; -} - -.rst-content dl:not(.docutils) dt { - background: none; - color: #000000; - border-top: none; -} - -.section > dl.field-list dt { - margin: 0; - padding: 0; - flex-basis: 20%; - display: block; -} - -.section > dl.field-list > dd { - flex-basis: 70%; - margin: 0; -} - -.section > dl.field-list > dd p { - margin: 0; -} - -/* ----- MDAnalysis coloured elements ------ */ - -.rst-content dl.class dt, .rst-content dl.function dt { - color: #ca6500; - background: #FFEBD0; - border-top: solid 3px #00b06d; -} - -.rst-content .viewcode-link, .rst-content .viewcode-back { - color: #2c263f; -} - -.rst-content .guilabel { - background: #efefef; - border: 1px solid #2c263f; -} - - -.rst-content .seealso p.admonition-title { - background: #2c263f; -} - -.rst-content .seealso { - background: #e3e3e3; -} - -.rst-content .error p.admonition-title, .rst-content .warning p.admonition-title { - background: #F45F4B; -} - -.rst-content .error, .rst-content .warning { - background: #FFEEED; -} - -.rst-content .caution p.admonition-title, .rst-content .note p.admonition-title, .rst-content .important p.admonition-title { - background: #00b06d; -} - -.rst-content .caution, .rst-content .note, .rst-content .important { - background: #FFEBD0; -} - -.rst-content code:not(.xref).literal { - color: #ca6500; -} diff --git a/docs/_static/doctools.js b/docs/_static/doctools.js deleted file mode 100644 index 8cbf1b161..000000000 --- a/docs/_static/doctools.js +++ /dev/null @@ -1,323 +0,0 @@ -/* - * doctools.js - * ~~~~~~~~~~~ - * - * Sphinx JavaScript utilities for all documentation. - * - * :copyright: Copyright 2007-2021 by the Sphinx team, see AUTHORS. - * :license: BSD, see LICENSE for details. 
- * - */ - -/** - * select a different prefix for underscore - */ -$u = _.noConflict(); - -/** - * make the code below compatible with browsers without - * an installed firebug like debugger -if (!window.console || !console.firebug) { - var names = ["log", "debug", "info", "warn", "error", "assert", "dir", - "dirxml", "group", "groupEnd", "time", "timeEnd", "count", "trace", - "profile", "profileEnd"]; - window.console = {}; - for (var i = 0; i < names.length; ++i) - window.console[names[i]] = function() {}; -} - */ - -/** - * small helper function to urldecode strings - * - * See https://developer.mozilla.org/en-US/docs/Web/JavaScript/Reference/Global_Objects/decodeURIComponent#Decoding_query_parameters_from_a_URL - */ -jQuery.urldecode = function(x) { - if (!x) { - return x - } - return decodeURIComponent(x.replace(/\+/g, ' ')); -}; - -/** - * small helper function to urlencode strings - */ -jQuery.urlencode = encodeURIComponent; - -/** - * This function returns the parsed url parameters of the - * current request. Multiple values per key are supported, - * it will always return arrays of strings for the value parts. - */ -jQuery.getQueryParameters = function(s) { - if (typeof s === 'undefined') - s = document.location.search; - var parts = s.substr(s.indexOf('?') + 1).split('&'); - var result = {}; - for (var i = 0; i < parts.length; i++) { - var tmp = parts[i].split('=', 2); - var key = jQuery.urldecode(tmp[0]); - var value = jQuery.urldecode(tmp[1]); - if (key in result) - result[key].push(value); - else - result[key] = [value]; - } - return result; -}; - -/** - * highlight a given string on a jquery object by wrapping it in - * span elements with the given class name. - */ -jQuery.fn.highlightText = function(text, className) { - function highlight(node, addItems) { - if (node.nodeType === 3) { - var val = node.nodeValue; - var pos = val.toLowerCase().indexOf(text); - if (pos >= 0 && - !jQuery(node.parentNode).hasClass(className) && - !jQuery(node.parentNode).hasClass("nohighlight")) { - var span; - var isInSVG = jQuery(node).closest("body, svg, foreignObject").is("svg"); - if (isInSVG) { - span = document.createElementNS("http://www.w3.org/2000/svg", "tspan"); - } else { - span = document.createElement("span"); - span.className = className; - } - span.appendChild(document.createTextNode(val.substr(pos, text.length))); - node.parentNode.insertBefore(span, node.parentNode.insertBefore( - document.createTextNode(val.substr(pos + text.length)), - node.nextSibling)); - node.nodeValue = val.substr(0, pos); - if (isInSVG) { - var rect = document.createElementNS("http://www.w3.org/2000/svg", "rect"); - var bbox = node.parentElement.getBBox(); - rect.x.baseVal.value = bbox.x; - rect.y.baseVal.value = bbox.y; - rect.width.baseVal.value = bbox.width; - rect.height.baseVal.value = bbox.height; - rect.setAttribute('class', className); - addItems.push({ - "parent": node.parentNode, - "target": rect}); - } - } - } - else if (!jQuery(node).is("button, select, textarea")) { - jQuery.each(node.childNodes, function() { - highlight(this, addItems); - }); - } - } - var addItems = []; - var result = this.each(function() { - highlight(this, addItems); - }); - for (var i = 0; i < addItems.length; ++i) { - jQuery(addItems[i].parent).before(addItems[i].target); - } - return result; -}; - -/* - * backward compatibility for jQuery.browser - * This will be supported until firefox bug is fixed. 
- */ -if (!jQuery.browser) { - jQuery.uaMatch = function(ua) { - ua = ua.toLowerCase(); - - var match = /(chrome)[ \/]([\w.]+)/.exec(ua) || - /(webkit)[ \/]([\w.]+)/.exec(ua) || - /(opera)(?:.*version|)[ \/]([\w.]+)/.exec(ua) || - /(msie) ([\w.]+)/.exec(ua) || - ua.indexOf("compatible") < 0 && /(mozilla)(?:.*? rv:([\w.]+)|)/.exec(ua) || - []; - - return { - browser: match[ 1 ] || "", - version: match[ 2 ] || "0" - }; - }; - jQuery.browser = {}; - jQuery.browser[jQuery.uaMatch(navigator.userAgent).browser] = true; -} - -/** - * Small JavaScript module for the documentation. - */ -var Documentation = { - - init : function() { - this.fixFirefoxAnchorBug(); - this.highlightSearchWords(); - this.initIndexTable(); - if (DOCUMENTATION_OPTIONS.NAVIGATION_WITH_KEYS) { - this.initOnKeyListeners(); - } - }, - - /** - * i18n support - */ - TRANSLATIONS : {}, - PLURAL_EXPR : function(n) { return n === 1 ? 0 : 1; }, - LOCALE : 'unknown', - - // gettext and ngettext don't access this so that the functions - // can safely bound to a different name (_ = Documentation.gettext) - gettext : function(string) { - var translated = Documentation.TRANSLATIONS[string]; - if (typeof translated === 'undefined') - return string; - return (typeof translated === 'string') ? translated : translated[0]; - }, - - ngettext : function(singular, plural, n) { - var translated = Documentation.TRANSLATIONS[singular]; - if (typeof translated === 'undefined') - return (n == 1) ? singular : plural; - return translated[Documentation.PLURALEXPR(n)]; - }, - - addTranslations : function(catalog) { - for (var key in catalog.messages) - this.TRANSLATIONS[key] = catalog.messages[key]; - this.PLURAL_EXPR = new Function('n', 'return +(' + catalog.plural_expr + ')'); - this.LOCALE = catalog.locale; - }, - - /** - * add context elements like header anchor links - */ - addContextElements : function() { - $('div[id] > :header:first').each(function() { - $('\u00B6'). - attr('href', '#' + this.id). - attr('title', _('Permalink to this headline')). - appendTo(this); - }); - $('dt[id]').each(function() { - $('\u00B6'). - attr('href', '#' + this.id). - attr('title', _('Permalink to this definition')). - appendTo(this); - }); - }, - - /** - * workaround a firefox stupidity - * see: https://bugzilla.mozilla.org/show_bug.cgi?id=645075 - */ - fixFirefoxAnchorBug : function() { - if (document.location.hash && $.browser.mozilla) - window.setTimeout(function() { - document.location.href += ''; - }, 10); - }, - - /** - * highlight the search words provided in the url in the text - */ - highlightSearchWords : function() { - var params = $.getQueryParameters(); - var terms = (params.highlight) ? 
params.highlight[0].split(/\s+/) : []; - if (terms.length) { - var body = $('div.body'); - if (!body.length) { - body = $('body'); - } - window.setTimeout(function() { - $.each(terms, function() { - body.highlightText(this.toLowerCase(), 'highlighted'); - }); - }, 10); - $('') - .appendTo($('#searchbox')); - } - }, - - /** - * init the domain index toggle buttons - */ - initIndexTable : function() { - var togglers = $('img.toggler').click(function() { - var src = $(this).attr('src'); - var idnum = $(this).attr('id').substr(7); - $('tr.cg-' + idnum).toggle(); - if (src.substr(-9) === 'minus.png') - $(this).attr('src', src.substr(0, src.length-9) + 'plus.png'); - else - $(this).attr('src', src.substr(0, src.length-8) + 'minus.png'); - }).css('display', ''); - if (DOCUMENTATION_OPTIONS.COLLAPSE_INDEX) { - togglers.click(); - } - }, - - /** - * helper function to hide the search marks again - */ - hideSearchWords : function() { - $('#searchbox .highlight-link').fadeOut(300); - $('span.highlighted').removeClass('highlighted'); - }, - - /** - * make the url absolute - */ - makeURL : function(relativeURL) { - return DOCUMENTATION_OPTIONS.URL_ROOT + '/' + relativeURL; - }, - - /** - * get the current relative url - */ - getCurrentURL : function() { - var path = document.location.pathname; - var parts = path.split(/\//); - $.each(DOCUMENTATION_OPTIONS.URL_ROOT.split(/\//), function() { - if (this === '..') - parts.pop(); - }); - var url = parts.join('/'); - return path.substring(url.lastIndexOf('/') + 1, path.length - 1); - }, - - initOnKeyListeners: function() { - $(document).keydown(function(event) { - var activeElementType = document.activeElement.tagName; - // don't navigate when in search box, textarea, dropdown or button - if (activeElementType !== 'TEXTAREA' && activeElementType !== 'INPUT' && activeElementType !== 'SELECT' - && activeElementType !== 'BUTTON' && !event.altKey && !event.ctrlKey && !event.metaKey - && !event.shiftKey) { - switch (event.keyCode) { - case 37: // left - var prevHref = $('link[rel="prev"]').prop('href'); - if (prevHref) { - window.location.href = prevHref; - return false; - } - break; - case 39: // right - var nextHref = $('link[rel="next"]').prop('href'); - if (nextHref) { - window.location.href = nextHref; - return false; - } - break; - } - } - }); - } -}; - -// quick alias for translations -_ = Documentation.gettext; - -$(document).ready(function() { - Documentation.init(); -}); diff --git a/docs/_static/documentation_options.js b/docs/_static/documentation_options.js deleted file mode 100644 index 713a8d72f..000000000 --- a/docs/_static/documentation_options.js +++ /dev/null @@ -1,12 +0,0 @@ -var DOCUMENTATION_OPTIONS = { - URL_ROOT: document.getElementById("documentation_options").getAttribute('data-url_root'), - VERSION: '1.6.1', - LANGUAGE: 'None', - COLLAPSE_INDEX: false, - BUILDER: 'html', - FILE_SUFFIX: '.html', - LINK_SUFFIX: '.html', - HAS_SOURCE: true, - SOURCELINK_SUFFIX: '.txt', - NAVIGATION_WITH_KEYS: false -}; \ No newline at end of file diff --git a/docs/_static/file.png b/docs/_static/file.png deleted file mode 100644 index a858a410e..000000000 Binary files a/docs/_static/file.png and /dev/null differ diff --git a/docs/_static/fonts/Inconsolata-Bold.ttf b/docs/_static/fonts/Inconsolata-Bold.ttf deleted file mode 100644 index 809c1f582..000000000 Binary files a/docs/_static/fonts/Inconsolata-Bold.ttf and /dev/null differ diff --git a/docs/_static/fonts/Inconsolata-Regular.ttf b/docs/_static/fonts/Inconsolata-Regular.ttf deleted file mode 100644 
index fc981ce7a..000000000 Binary files a/docs/_static/fonts/Inconsolata-Regular.ttf and /dev/null differ diff --git a/docs/_static/fonts/Inconsolata.ttf b/docs/_static/fonts/Inconsolata.ttf deleted file mode 100644 index 4b8a36d24..000000000 Binary files a/docs/_static/fonts/Inconsolata.ttf and /dev/null differ diff --git a/docs/_static/fonts/Lato-Bold.ttf b/docs/_static/fonts/Lato-Bold.ttf deleted file mode 100644 index 1d23c7066..000000000 Binary files a/docs/_static/fonts/Lato-Bold.ttf and /dev/null differ diff --git a/docs/_static/fonts/Lato-Regular.ttf b/docs/_static/fonts/Lato-Regular.ttf deleted file mode 100644 index 0f3d0f837..000000000 Binary files a/docs/_static/fonts/Lato-Regular.ttf and /dev/null differ diff --git a/docs/_static/fonts/Lato/lato-bold.eot b/docs/_static/fonts/Lato/lato-bold.eot deleted file mode 100644 index 3361183a4..000000000 Binary files a/docs/_static/fonts/Lato/lato-bold.eot and /dev/null differ diff --git a/docs/_static/fonts/Lato/lato-bold.ttf b/docs/_static/fonts/Lato/lato-bold.ttf deleted file mode 100644 index 29f691d5e..000000000 Binary files a/docs/_static/fonts/Lato/lato-bold.ttf and /dev/null differ diff --git a/docs/_static/fonts/Lato/lato-bold.woff b/docs/_static/fonts/Lato/lato-bold.woff deleted file mode 100644 index c6dff51f0..000000000 Binary files a/docs/_static/fonts/Lato/lato-bold.woff and /dev/null differ diff --git a/docs/_static/fonts/Lato/lato-bold.woff2 b/docs/_static/fonts/Lato/lato-bold.woff2 deleted file mode 100644 index bb195043c..000000000 Binary files a/docs/_static/fonts/Lato/lato-bold.woff2 and /dev/null differ diff --git a/docs/_static/fonts/Lato/lato-bolditalic.eot b/docs/_static/fonts/Lato/lato-bolditalic.eot deleted file mode 100644 index 3d4154936..000000000 Binary files a/docs/_static/fonts/Lato/lato-bolditalic.eot and /dev/null differ diff --git a/docs/_static/fonts/Lato/lato-bolditalic.ttf b/docs/_static/fonts/Lato/lato-bolditalic.ttf deleted file mode 100644 index f402040b3..000000000 Binary files a/docs/_static/fonts/Lato/lato-bolditalic.ttf and /dev/null differ diff --git a/docs/_static/fonts/Lato/lato-bolditalic.woff b/docs/_static/fonts/Lato/lato-bolditalic.woff deleted file mode 100644 index 88ad05b9f..000000000 Binary files a/docs/_static/fonts/Lato/lato-bolditalic.woff and /dev/null differ diff --git a/docs/_static/fonts/Lato/lato-bolditalic.woff2 b/docs/_static/fonts/Lato/lato-bolditalic.woff2 deleted file mode 100644 index c4e3d804b..000000000 Binary files a/docs/_static/fonts/Lato/lato-bolditalic.woff2 and /dev/null differ diff --git a/docs/_static/fonts/Lato/lato-italic.eot b/docs/_static/fonts/Lato/lato-italic.eot deleted file mode 100644 index 3f826421a..000000000 Binary files a/docs/_static/fonts/Lato/lato-italic.eot and /dev/null differ diff --git a/docs/_static/fonts/Lato/lato-italic.ttf b/docs/_static/fonts/Lato/lato-italic.ttf deleted file mode 100644 index b4bfc9b24..000000000 Binary files a/docs/_static/fonts/Lato/lato-italic.ttf and /dev/null differ diff --git a/docs/_static/fonts/Lato/lato-italic.woff b/docs/_static/fonts/Lato/lato-italic.woff deleted file mode 100644 index 76114bc03..000000000 Binary files a/docs/_static/fonts/Lato/lato-italic.woff and /dev/null differ diff --git a/docs/_static/fonts/Lato/lato-italic.woff2 b/docs/_static/fonts/Lato/lato-italic.woff2 deleted file mode 100644 index 3404f37e2..000000000 Binary files a/docs/_static/fonts/Lato/lato-italic.woff2 and /dev/null differ diff --git a/docs/_static/fonts/Lato/lato-regular.eot b/docs/_static/fonts/Lato/lato-regular.eot 
deleted file mode 100644 index 11e3f2a5f..000000000 Binary files a/docs/_static/fonts/Lato/lato-regular.eot and /dev/null differ diff --git a/docs/_static/fonts/Lato/lato-regular.ttf b/docs/_static/fonts/Lato/lato-regular.ttf deleted file mode 100644 index 74decd9eb..000000000 Binary files a/docs/_static/fonts/Lato/lato-regular.ttf and /dev/null differ diff --git a/docs/_static/fonts/Lato/lato-regular.woff b/docs/_static/fonts/Lato/lato-regular.woff deleted file mode 100644 index ae1307ff5..000000000 Binary files a/docs/_static/fonts/Lato/lato-regular.woff and /dev/null differ diff --git a/docs/_static/fonts/Lato/lato-regular.woff2 b/docs/_static/fonts/Lato/lato-regular.woff2 deleted file mode 100644 index 3bf984332..000000000 Binary files a/docs/_static/fonts/Lato/lato-regular.woff2 and /dev/null differ diff --git a/docs/_static/fonts/RobotoSlab-Bold.ttf b/docs/_static/fonts/RobotoSlab-Bold.ttf deleted file mode 100644 index df5d1df27..000000000 Binary files a/docs/_static/fonts/RobotoSlab-Bold.ttf and /dev/null differ diff --git a/docs/_static/fonts/RobotoSlab-Regular.ttf b/docs/_static/fonts/RobotoSlab-Regular.ttf deleted file mode 100644 index eb52a7907..000000000 Binary files a/docs/_static/fonts/RobotoSlab-Regular.ttf and /dev/null differ diff --git a/docs/_static/fonts/RobotoSlab/roboto-slab-v7-bold.eot b/docs/_static/fonts/RobotoSlab/roboto-slab-v7-bold.eot deleted file mode 100644 index 79dc8efed..000000000 Binary files a/docs/_static/fonts/RobotoSlab/roboto-slab-v7-bold.eot and /dev/null differ diff --git a/docs/_static/fonts/RobotoSlab/roboto-slab-v7-bold.ttf b/docs/_static/fonts/RobotoSlab/roboto-slab-v7-bold.ttf deleted file mode 100644 index df5d1df27..000000000 Binary files a/docs/_static/fonts/RobotoSlab/roboto-slab-v7-bold.ttf and /dev/null differ diff --git a/docs/_static/fonts/RobotoSlab/roboto-slab-v7-bold.woff b/docs/_static/fonts/RobotoSlab/roboto-slab-v7-bold.woff deleted file mode 100644 index 6cb600001..000000000 Binary files a/docs/_static/fonts/RobotoSlab/roboto-slab-v7-bold.woff and /dev/null differ diff --git a/docs/_static/fonts/RobotoSlab/roboto-slab-v7-bold.woff2 b/docs/_static/fonts/RobotoSlab/roboto-slab-v7-bold.woff2 deleted file mode 100644 index 7059e2314..000000000 Binary files a/docs/_static/fonts/RobotoSlab/roboto-slab-v7-bold.woff2 and /dev/null differ diff --git a/docs/_static/fonts/RobotoSlab/roboto-slab-v7-regular.eot b/docs/_static/fonts/RobotoSlab/roboto-slab-v7-regular.eot deleted file mode 100644 index 2f7ca78a1..000000000 Binary files a/docs/_static/fonts/RobotoSlab/roboto-slab-v7-regular.eot and /dev/null differ diff --git a/docs/_static/fonts/RobotoSlab/roboto-slab-v7-regular.ttf b/docs/_static/fonts/RobotoSlab/roboto-slab-v7-regular.ttf deleted file mode 100644 index eb52a7907..000000000 Binary files a/docs/_static/fonts/RobotoSlab/roboto-slab-v7-regular.ttf and /dev/null differ diff --git a/docs/_static/fonts/RobotoSlab/roboto-slab-v7-regular.woff b/docs/_static/fonts/RobotoSlab/roboto-slab-v7-regular.woff deleted file mode 100644 index f815f63f9..000000000 Binary files a/docs/_static/fonts/RobotoSlab/roboto-slab-v7-regular.woff and /dev/null differ diff --git a/docs/_static/fonts/RobotoSlab/roboto-slab-v7-regular.woff2 b/docs/_static/fonts/RobotoSlab/roboto-slab-v7-regular.woff2 deleted file mode 100644 index f2c76e5bd..000000000 Binary files a/docs/_static/fonts/RobotoSlab/roboto-slab-v7-regular.woff2 and /dev/null differ diff --git a/docs/_static/fonts/fontawesome-webfont.eot b/docs/_static/fonts/fontawesome-webfont.eot deleted file 
mode 100644 index e9f60ca95..000000000 Binary files a/docs/_static/fonts/fontawesome-webfont.eot and /dev/null differ diff --git a/docs/_static/fonts/fontawesome-webfont.svg b/docs/_static/fonts/fontawesome-webfont.svg deleted file mode 100644 index 855c845e5..000000000 --- a/docs/_static/fonts/fontawesome-webfont.svg +++ /dev/null @@ -1,2671 +0,0 @@ -Created by FontForge 20120731 at Mon Oct 24 17:37:40 2016 - By ,,, -Copyright Dave Gandy 2016. All rights reserved. diff --git a/docs/_static/fonts/fontawesome-webfont.ttf b/docs/_static/fonts/fontawesome-webfont.ttf deleted file mode 100644 index 35acda2fa..000000000 Binary files a/docs/_static/fonts/fontawesome-webfont.ttf and /dev/null differ diff --git a/docs/_static/fonts/fontawesome-webfont.woff b/docs/_static/fonts/fontawesome-webfont.woff deleted file mode 100644 index 400014a4b..000000000 Binary files a/docs/_static/fonts/fontawesome-webfont.woff and /dev/null differ diff --git a/docs/_static/fonts/fontawesome-webfont.woff2 b/docs/_static/fonts/fontawesome-webfont.woff2 deleted file mode 100644 index 4d13fc604..000000000 Binary files a/docs/_static/fonts/fontawesome-webfont.woff2 and /dev/null differ diff --git a/docs/_static/graphviz.css b/docs/_static/graphviz.css deleted file mode 100644 index b340734c7..000000000 --- a/docs/_static/graphviz.css +++ /dev/null @@ -1,19 +0,0 @@ -/* - * graphviz.css - * ~~~~~~~~~~~~ - * - * Sphinx stylesheet -- graphviz extension. - * - * :copyright: Copyright 2007-2021 by the Sphinx team, see AUTHORS. - * :license: BSD, see LICENSE for details. - * - */ - -img.graphviz { - border: 0; - max-width: 100%; -} - -object.graphviz { - max-width: 100%; -} diff --git a/docs/_static/jquery-3.5.1.js b/docs/_static/jquery-3.5.1.js deleted file mode 100644 index 50937333b..000000000 --- a/docs/_static/jquery-3.5.1.js +++ /dev/null @@ -1,10872 +0,0 @@ -/*!
- * jQuery JavaScript Library v3.5.1 - * https://jquery.com/ - * - * Includes Sizzle.js - * https://sizzlejs.com/ - * - * Copyright JS Foundation and other contributors - * Released under the MIT license - * https://jquery.org/license - * - * Date: 2020-05-04T22:49Z - */ -( function( global, factory ) { - - "use strict"; - - if ( typeof module === "object" && typeof module.exports === "object" ) { - - // For CommonJS and CommonJS-like environments where a proper `window` - // is present, execute the factory and get jQuery. - // For environments that do not have a `window` with a `document` - // (such as Node.js), expose a factory as module.exports. - // This accentuates the need for the creation of a real `window`. - // e.g. var jQuery = require("jquery")(window); - // See ticket #14549 for more info. - module.exports = global.document ? - factory( global, true ) : - function( w ) { - if ( !w.document ) { - throw new Error( "jQuery requires a window with a document" ); - } - return factory( w ); - }; - } else { - factory( global ); - } - -// Pass this if window is not defined yet -} )( typeof window !== "undefined" ? window : this, function( window, noGlobal ) { - -// Edge <= 12 - 13+, Firefox <=18 - 45+, IE 10 - 11, Safari 5.1 - 9+, iOS 6 - 9.1 -// throw exceptions when non-strict code (e.g., ASP.NET 4.5) accesses strict mode -// arguments.callee.caller (trac-13335). But as of jQuery 3.0 (2016), strict mode should be common -// enough that all such attempts are guarded in a try block. -"use strict"; - -var arr = []; - -var getProto = Object.getPrototypeOf; - -var slice = arr.slice; - -var flat = arr.flat ? function( array ) { - return arr.flat.call( array ); -} : function( array ) { - return arr.concat.apply( [], array ); -}; - - -var push = arr.push; - -var indexOf = arr.indexOf; - -var class2type = {}; - -var toString = class2type.toString; - -var hasOwn = class2type.hasOwnProperty; - -var fnToString = hasOwn.toString; - -var ObjectFunctionString = fnToString.call( Object ); - -var support = {}; - -var isFunction = function isFunction( obj ) { - - // Support: Chrome <=57, Firefox <=52 - // In some browsers, typeof returns "function" for HTML elements - // (i.e., `typeof document.createElement( "object" ) === "function"`). - // We don't want to classify *any* DOM node as a function. - return typeof obj === "function" && typeof obj.nodeType !== "number"; - }; - - -var isWindow = function isWindow( obj ) { - return obj != null && obj === obj.window; - }; - - -var document = window.document; - - - - var preservedScriptAttributes = { - type: true, - src: true, - nonce: true, - noModule: true - }; - - function DOMEval( code, node, doc ) { - doc = doc || document; - - var i, val, - script = doc.createElement( "script" ); - - script.text = code; - if ( node ) { - for ( i in preservedScriptAttributes ) { - - // Support: Firefox 64+, Edge 18+ - // Some browsers don't support the "nonce" property on scripts. - // On the other hand, just using `getAttribute` is not enough as - // the `nonce` attribute is reset to an empty string whenever it - // becomes browsing-context connected. - // See https://github.com/whatwg/html/issues/2369 - // See https://html.spec.whatwg.org/#nonce-attributes - // The `node.getAttribute` check was added for the sake of - // `jQuery.globalEval` so that it can fake a nonce-containing node - // via an object. 
- val = node[ i ] || node.getAttribute && node.getAttribute( i ); - if ( val ) { - script.setAttribute( i, val ); - } - } - } - doc.head.appendChild( script ).parentNode.removeChild( script ); - } - - -function toType( obj ) { - if ( obj == null ) { - return obj + ""; - } - - // Support: Android <=2.3 only (functionish RegExp) - return typeof obj === "object" || typeof obj === "function" ? - class2type[ toString.call( obj ) ] || "object" : - typeof obj; -} -/* global Symbol */ -// Defining this global in .eslintrc.json would create a danger of using the global -// unguarded in another place, it seems safer to define global only for this module - - - -var - version = "3.5.1", - - // Define a local copy of jQuery - jQuery = function( selector, context ) { - - // The jQuery object is actually just the init constructor 'enhanced' - // Need init if jQuery is called (just allow error to be thrown if not included) - return new jQuery.fn.init( selector, context ); - }; - -jQuery.fn = jQuery.prototype = { - - // The current version of jQuery being used - jquery: version, - - constructor: jQuery, - - // The default length of a jQuery object is 0 - length: 0, - - toArray: function() { - return slice.call( this ); - }, - - // Get the Nth element in the matched element set OR - // Get the whole matched element set as a clean array - get: function( num ) { - - // Return all the elements in a clean array - if ( num == null ) { - return slice.call( this ); - } - - // Return just the one element from the set - return num < 0 ? this[ num + this.length ] : this[ num ]; - }, - - // Take an array of elements and push it onto the stack - // (returning the new matched element set) - pushStack: function( elems ) { - - // Build a new jQuery matched element set - var ret = jQuery.merge( this.constructor(), elems ); - - // Add the old object onto the stack (as a reference) - ret.prevObject = this; - - // Return the newly-formed element set - return ret; - }, - - // Execute a callback for every element in the matched set. - each: function( callback ) { - return jQuery.each( this, callback ); - }, - - map: function( callback ) { - return this.pushStack( jQuery.map( this, function( elem, i ) { - return callback.call( elem, i, elem ); - } ) ); - }, - - slice: function() { - return this.pushStack( slice.apply( this, arguments ) ); - }, - - first: function() { - return this.eq( 0 ); - }, - - last: function() { - return this.eq( -1 ); - }, - - even: function() { - return this.pushStack( jQuery.grep( this, function( _elem, i ) { - return ( i + 1 ) % 2; - } ) ); - }, - - odd: function() { - return this.pushStack( jQuery.grep( this, function( _elem, i ) { - return i % 2; - } ) ); - }, - - eq: function( i ) { - var len = this.length, - j = +i + ( i < 0 ? len : 0 ); - return this.pushStack( j >= 0 && j < len ? [ this[ j ] ] : [] ); - }, - - end: function() { - return this.prevObject || this.constructor(); - }, - - // For internal use only. - // Behaves like an Array's method, not like a jQuery method. 
- push: push, - sort: arr.sort, - splice: arr.splice -}; - -jQuery.extend = jQuery.fn.extend = function() { - var options, name, src, copy, copyIsArray, clone, - target = arguments[ 0 ] || {}, - i = 1, - length = arguments.length, - deep = false; - - // Handle a deep copy situation - if ( typeof target === "boolean" ) { - deep = target; - - // Skip the boolean and the target - target = arguments[ i ] || {}; - i++; - } - - // Handle case when target is a string or something (possible in deep copy) - if ( typeof target !== "object" && !isFunction( target ) ) { - target = {}; - } - - // Extend jQuery itself if only one argument is passed - if ( i === length ) { - target = this; - i--; - } - - for ( ; i < length; i++ ) { - - // Only deal with non-null/undefined values - if ( ( options = arguments[ i ] ) != null ) { - - // Extend the base object - for ( name in options ) { - copy = options[ name ]; - - // Prevent Object.prototype pollution - // Prevent never-ending loop - if ( name === "__proto__" || target === copy ) { - continue; - } - - // Recurse if we're merging plain objects or arrays - if ( deep && copy && ( jQuery.isPlainObject( copy ) || - ( copyIsArray = Array.isArray( copy ) ) ) ) { - src = target[ name ]; - - // Ensure proper type for the source value - if ( copyIsArray && !Array.isArray( src ) ) { - clone = []; - } else if ( !copyIsArray && !jQuery.isPlainObject( src ) ) { - clone = {}; - } else { - clone = src; - } - copyIsArray = false; - - // Never move original objects, clone them - target[ name ] = jQuery.extend( deep, clone, copy ); - - // Don't bring in undefined values - } else if ( copy !== undefined ) { - target[ name ] = copy; - } - } - } - } - - // Return the modified object - return target; -}; - -jQuery.extend( { - - // Unique for each copy of jQuery on the page - expando: "jQuery" + ( version + Math.random() ).replace( /\D/g, "" ), - - // Assume jQuery is ready without the ready module - isReady: true, - - error: function( msg ) { - throw new Error( msg ); - }, - - noop: function() {}, - - isPlainObject: function( obj ) { - var proto, Ctor; - - // Detect obvious negatives - // Use toString instead of jQuery.type to catch host objects - if ( !obj || toString.call( obj ) !== "[object Object]" ) { - return false; - } - - proto = getProto( obj ); - - // Objects with no prototype (e.g., `Object.create( null )`) are plain - if ( !proto ) { - return true; - } - - // Objects with prototype are plain iff they were constructed by a global Object function - Ctor = hasOwn.call( proto, "constructor" ) && proto.constructor; - return typeof Ctor === "function" && fnToString.call( Ctor ) === ObjectFunctionString; - }, - - isEmptyObject: function( obj ) { - var name; - - for ( name in obj ) { - return false; - } - return true; - }, - - // Evaluates a script in a provided context; falls back to the global one - // if not specified. 
- globalEval: function( code, options, doc ) { - DOMEval( code, { nonce: options && options.nonce }, doc ); - }, - - each: function( obj, callback ) { - var length, i = 0; - - if ( isArrayLike( obj ) ) { - length = obj.length; - for ( ; i < length; i++ ) { - if ( callback.call( obj[ i ], i, obj[ i ] ) === false ) { - break; - } - } - } else { - for ( i in obj ) { - if ( callback.call( obj[ i ], i, obj[ i ] ) === false ) { - break; - } - } - } - - return obj; - }, - - // results is for internal usage only - makeArray: function( arr, results ) { - var ret = results || []; - - if ( arr != null ) { - if ( isArrayLike( Object( arr ) ) ) { - jQuery.merge( ret, - typeof arr === "string" ? - [ arr ] : arr - ); - } else { - push.call( ret, arr ); - } - } - - return ret; - }, - - inArray: function( elem, arr, i ) { - return arr == null ? -1 : indexOf.call( arr, elem, i ); - }, - - // Support: Android <=4.0 only, PhantomJS 1 only - // push.apply(_, arraylike) throws on ancient WebKit - merge: function( first, second ) { - var len = +second.length, - j = 0, - i = first.length; - - for ( ; j < len; j++ ) { - first[ i++ ] = second[ j ]; - } - - first.length = i; - - return first; - }, - - grep: function( elems, callback, invert ) { - var callbackInverse, - matches = [], - i = 0, - length = elems.length, - callbackExpect = !invert; - - // Go through the array, only saving the items - // that pass the validator function - for ( ; i < length; i++ ) { - callbackInverse = !callback( elems[ i ], i ); - if ( callbackInverse !== callbackExpect ) { - matches.push( elems[ i ] ); - } - } - - return matches; - }, - - // arg is for internal usage only - map: function( elems, callback, arg ) { - var length, value, - i = 0, - ret = []; - - // Go through the array, translating each of the items to their new values - if ( isArrayLike( elems ) ) { - length = elems.length; - for ( ; i < length; i++ ) { - value = callback( elems[ i ], i, arg ); - - if ( value != null ) { - ret.push( value ); - } - } - - // Go through every key on the object, - } else { - for ( i in elems ) { - value = callback( elems[ i ], i, arg ); - - if ( value != null ) { - ret.push( value ); - } - } - } - - // Flatten any nested arrays - return flat( ret ); - }, - - // A global GUID counter for objects - guid: 1, - - // jQuery.support is not used in Core but other projects attach their - // properties to it so it needs to exist. - support: support -} ); - -if ( typeof Symbol === "function" ) { - jQuery.fn[ Symbol.iterator ] = arr[ Symbol.iterator ]; -} - -// Populate the class2type map -jQuery.each( "Boolean Number String Function Array Date RegExp Object Error Symbol".split( " " ), -function( _i, name ) { - class2type[ "[object " + name + "]" ] = name.toLowerCase(); -} ); - -function isArrayLike( obj ) { - - // Support: real iOS 8.2 only (not reproducible in simulator) - // `in` check used to prevent JIT error (gh-2145) - // hasOwn isn't used here due to false negatives - // regarding Nodelist length in IE - var length = !!obj && "length" in obj && obj.length, - type = toType( obj ); - - if ( isFunction( obj ) || isWindow( obj ) ) { - return false; - } - - return type === "array" || length === 0 || - typeof length === "number" && length > 0 && ( length - 1 ) in obj; -} -var Sizzle = -/*! 
- * Sizzle CSS Selector Engine v2.3.5
- * https://sizzlejs.com/
- *
- * Copyright JS Foundation and other contributors
- * Released under the MIT license
- * https://js.foundation/
- *
- * Date: 2020-03-14
- */
[... remainder of the deleted file omitted: vendored Sizzle/jQuery selector-engine source (tokenizer, compiler, matchers, and browser support shims) committed as part of the generated docs/ build output and removed along with the rest of the built documentation ...]
- val.value : - null; - } - } ); -} - -return Sizzle; - -} )( window ); - - - -jQuery.find = Sizzle; -jQuery.expr = Sizzle.selectors; - -// Deprecated -jQuery.expr[ ":" ] = jQuery.expr.pseudos; -jQuery.uniqueSort = jQuery.unique = Sizzle.uniqueSort; -jQuery.text = Sizzle.getText; -jQuery.isXMLDoc = Sizzle.isXML; -jQuery.contains = Sizzle.contains; -jQuery.escapeSelector = Sizzle.escape; - - - - -var dir = function( elem, dir, until ) { - var matched = [], - truncate = until !== undefined; - - while ( ( elem = elem[ dir ] ) && elem.nodeType !== 9 ) { - if ( elem.nodeType === 1 ) { - if ( truncate && jQuery( elem ).is( until ) ) { - break; - } - matched.push( elem ); - } - } - return matched; -}; - - -var siblings = function( n, elem ) { - var matched = []; - - for ( ; n; n = n.nextSibling ) { - if ( n.nodeType === 1 && n !== elem ) { - matched.push( n ); - } - } - - return matched; -}; - - -var rneedsContext = jQuery.expr.match.needsContext; - - - -function nodeName( elem, name ) { - - return elem.nodeName && elem.nodeName.toLowerCase() === name.toLowerCase(); - -}; -var rsingleTag = ( /^<([a-z][^\/\0>:\x20\t\r\n\f]*)[\x20\t\r\n\f]*\/?>(?:<\/\1>|)$/i ); - - - -// Implement the identical functionality for filter and not -function winnow( elements, qualifier, not ) { - if ( isFunction( qualifier ) ) { - return jQuery.grep( elements, function( elem, i ) { - return !!qualifier.call( elem, i, elem ) !== not; - } ); - } - - // Single element - if ( qualifier.nodeType ) { - return jQuery.grep( elements, function( elem ) { - return ( elem === qualifier ) !== not; - } ); - } - - // Arraylike of elements (jQuery, arguments, Array) - if ( typeof qualifier !== "string" ) { - return jQuery.grep( elements, function( elem ) { - return ( indexOf.call( qualifier, elem ) > -1 ) !== not; - } ); - } - - // Filtered directly for both simple and complex selectors - return jQuery.filter( qualifier, elements, not ); -} - -jQuery.filter = function( expr, elems, not ) { - var elem = elems[ 0 ]; - - if ( not ) { - expr = ":not(" + expr + ")"; - } - - if ( elems.length === 1 && elem.nodeType === 1 ) { - return jQuery.find.matchesSelector( elem, expr ) ? [ elem ] : []; - } - - return jQuery.find.matches( expr, jQuery.grep( elems, function( elem ) { - return elem.nodeType === 1; - } ) ); -}; - -jQuery.fn.extend( { - find: function( selector ) { - var i, ret, - len = this.length, - self = this; - - if ( typeof selector !== "string" ) { - return this.pushStack( jQuery( selector ).filter( function() { - for ( i = 0; i < len; i++ ) { - if ( jQuery.contains( self[ i ], this ) ) { - return true; - } - } - } ) ); - } - - ret = this.pushStack( [] ); - - for ( i = 0; i < len; i++ ) { - jQuery.find( selector, self[ i ], ret ); - } - - return len > 1 ? jQuery.uniqueSort( ret ) : ret; - }, - filter: function( selector ) { - return this.pushStack( winnow( this, selector || [], false ) ); - }, - not: function( selector ) { - return this.pushStack( winnow( this, selector || [], true ) ); - }, - is: function( selector ) { - return !!winnow( - this, - - // If this is a positional/relative selector, check membership in the returned set - // so $("p:first").is("p:last") won't return true for a doc with two "p". - typeof selector === "string" && rneedsContext.test( selector ) ? 
- jQuery( selector ) : - selector || [], - false - ).length; - } -} ); - - -// Initialize a jQuery object - - -// A central reference to the root jQuery(document) -var rootjQuery, - - // A simple way to check for HTML strings - // Prioritize #id over to avoid XSS via location.hash (#9521) - // Strict HTML recognition (#11290: must start with <) - // Shortcut simple #id case for speed - rquickExpr = /^(?:\s*(<[\w\W]+>)[^>]*|#([\w-]+))$/, - - init = jQuery.fn.init = function( selector, context, root ) { - var match, elem; - - // HANDLE: $(""), $(null), $(undefined), $(false) - if ( !selector ) { - return this; - } - - // Method init() accepts an alternate rootjQuery - // so migrate can support jQuery.sub (gh-2101) - root = root || rootjQuery; - - // Handle HTML strings - if ( typeof selector === "string" ) { - if ( selector[ 0 ] === "<" && - selector[ selector.length - 1 ] === ">" && - selector.length >= 3 ) { - - // Assume that strings that start and end with <> are HTML and skip the regex check - match = [ null, selector, null ]; - - } else { - match = rquickExpr.exec( selector ); - } - - // Match html or make sure no context is specified for #id - if ( match && ( match[ 1 ] || !context ) ) { - - // HANDLE: $(html) -> $(array) - if ( match[ 1 ] ) { - context = context instanceof jQuery ? context[ 0 ] : context; - - // Option to run scripts is true for back-compat - // Intentionally let the error be thrown if parseHTML is not present - jQuery.merge( this, jQuery.parseHTML( - match[ 1 ], - context && context.nodeType ? context.ownerDocument || context : document, - true - ) ); - - // HANDLE: $(html, props) - if ( rsingleTag.test( match[ 1 ] ) && jQuery.isPlainObject( context ) ) { - for ( match in context ) { - - // Properties of context are called as methods if possible - if ( isFunction( this[ match ] ) ) { - this[ match ]( context[ match ] ); - - // ...and otherwise set as attributes - } else { - this.attr( match, context[ match ] ); - } - } - } - - return this; - - // HANDLE: $(#id) - } else { - elem = document.getElementById( match[ 2 ] ); - - if ( elem ) { - - // Inject the element directly into the jQuery object - this[ 0 ] = elem; - this.length = 1; - } - return this; - } - - // HANDLE: $(expr, $(...)) - } else if ( !context || context.jquery ) { - return ( context || root ).find( selector ); - - // HANDLE: $(expr, context) - // (which is just equivalent to: $(context).find(expr) - } else { - return this.constructor( context ).find( selector ); - } - - // HANDLE: $(DOMElement) - } else if ( selector.nodeType ) { - this[ 0 ] = selector; - this.length = 1; - return this; - - // HANDLE: $(function) - // Shortcut for document ready - } else if ( isFunction( selector ) ) { - return root.ready !== undefined ? 
- root.ready( selector ) : - - // Execute immediately if ready is not present - selector( jQuery ); - } - - return jQuery.makeArray( selector, this ); - }; - -// Give the init function the jQuery prototype for later instantiation -init.prototype = jQuery.fn; - -// Initialize central reference -rootjQuery = jQuery( document ); - - -var rparentsprev = /^(?:parents|prev(?:Until|All))/, - - // Methods guaranteed to produce a unique set when starting from a unique set - guaranteedUnique = { - children: true, - contents: true, - next: true, - prev: true - }; - -jQuery.fn.extend( { - has: function( target ) { - var targets = jQuery( target, this ), - l = targets.length; - - return this.filter( function() { - var i = 0; - for ( ; i < l; i++ ) { - if ( jQuery.contains( this, targets[ i ] ) ) { - return true; - } - } - } ); - }, - - closest: function( selectors, context ) { - var cur, - i = 0, - l = this.length, - matched = [], - targets = typeof selectors !== "string" && jQuery( selectors ); - - // Positional selectors never match, since there's no _selection_ context - if ( !rneedsContext.test( selectors ) ) { - for ( ; i < l; i++ ) { - for ( cur = this[ i ]; cur && cur !== context; cur = cur.parentNode ) { - - // Always skip document fragments - if ( cur.nodeType < 11 && ( targets ? - targets.index( cur ) > -1 : - - // Don't pass non-elements to Sizzle - cur.nodeType === 1 && - jQuery.find.matchesSelector( cur, selectors ) ) ) { - - matched.push( cur ); - break; - } - } - } - } - - return this.pushStack( matched.length > 1 ? jQuery.uniqueSort( matched ) : matched ); - }, - - // Determine the position of an element within the set - index: function( elem ) { - - // No argument, return index in parent - if ( !elem ) { - return ( this[ 0 ] && this[ 0 ].parentNode ) ? this.first().prevAll().length : -1; - } - - // Index in selector - if ( typeof elem === "string" ) { - return indexOf.call( jQuery( elem ), this[ 0 ] ); - } - - // Locate the position of the desired element - return indexOf.call( this, - - // If it receives a jQuery object, the first element is used - elem.jquery ? elem[ 0 ] : elem - ); - }, - - add: function( selector, context ) { - return this.pushStack( - jQuery.uniqueSort( - jQuery.merge( this.get(), jQuery( selector, context ) ) - ) - ); - }, - - addBack: function( selector ) { - return this.add( selector == null ? - this.prevObject : this.prevObject.filter( selector ) - ); - } -} ); - -function sibling( cur, dir ) { - while ( ( cur = cur[ dir ] ) && cur.nodeType !== 1 ) {} - return cur; -} - -jQuery.each( { - parent: function( elem ) { - var parent = elem.parentNode; - return parent && parent.nodeType !== 11 ? 
parent : null; - }, - parents: function( elem ) { - return dir( elem, "parentNode" ); - }, - parentsUntil: function( elem, _i, until ) { - return dir( elem, "parentNode", until ); - }, - next: function( elem ) { - return sibling( elem, "nextSibling" ); - }, - prev: function( elem ) { - return sibling( elem, "previousSibling" ); - }, - nextAll: function( elem ) { - return dir( elem, "nextSibling" ); - }, - prevAll: function( elem ) { - return dir( elem, "previousSibling" ); - }, - nextUntil: function( elem, _i, until ) { - return dir( elem, "nextSibling", until ); - }, - prevUntil: function( elem, _i, until ) { - return dir( elem, "previousSibling", until ); - }, - siblings: function( elem ) { - return siblings( ( elem.parentNode || {} ).firstChild, elem ); - }, - children: function( elem ) { - return siblings( elem.firstChild ); - }, - contents: function( elem ) { - if ( elem.contentDocument != null && - - // Support: IE 11+ - // elements with no `data` attribute has an object - // `contentDocument` with a `null` prototype. - getProto( elem.contentDocument ) ) { - - return elem.contentDocument; - } - - // Support: IE 9 - 11 only, iOS 7 only, Android Browser <=4.3 only - // Treat the template element as a regular one in browsers that - // don't support it. - if ( nodeName( elem, "template" ) ) { - elem = elem.content || elem; - } - - return jQuery.merge( [], elem.childNodes ); - } -}, function( name, fn ) { - jQuery.fn[ name ] = function( until, selector ) { - var matched = jQuery.map( this, fn, until ); - - if ( name.slice( -5 ) !== "Until" ) { - selector = until; - } - - if ( selector && typeof selector === "string" ) { - matched = jQuery.filter( selector, matched ); - } - - if ( this.length > 1 ) { - - // Remove duplicates - if ( !guaranteedUnique[ name ] ) { - jQuery.uniqueSort( matched ); - } - - // Reverse order for parents* and prev-derivatives - if ( rparentsprev.test( name ) ) { - matched.reverse(); - } - } - - return this.pushStack( matched ); - }; -} ); -var rnothtmlwhite = ( /[^\x20\t\r\n\f]+/g ); - - - -// Convert String-formatted options into Object-formatted ones -function createOptions( options ) { - var object = {}; - jQuery.each( options.match( rnothtmlwhite ) || [], function( _, flag ) { - object[ flag ] = true; - } ); - return object; -} - -/* - * Create a callback list using the following parameters: - * - * options: an optional list of space-separated options that will change how - * the callback list behaves or a more traditional option object - * - * By default a callback list will act like an event callback list and can be - * "fired" multiple times. - * - * Possible options: - * - * once: will ensure the callback list can only be fired once (like a Deferred) - * - * memory: will keep track of previous values and will call any callback added - * after the list has been fired right away with the latest "memorized" - * values (like a Deferred) - * - * unique: will ensure a callback can only be added once (no duplicate in the list) - * - * stopOnFalse: interrupt callings when a callback returns false - * - */ -jQuery.Callbacks = function( options ) { - - // Convert options from String-formatted to Object-formatted if needed - // (we check in cache first) - options = typeof options === "string" ? 
- createOptions( options ) : - jQuery.extend( {}, options ); - - var // Flag to know if list is currently firing - firing, - - // Last fire value for non-forgettable lists - memory, - - // Flag to know if list was already fired - fired, - - // Flag to prevent firing - locked, - - // Actual callback list - list = [], - - // Queue of execution data for repeatable lists - queue = [], - - // Index of currently firing callback (modified by add/remove as needed) - firingIndex = -1, - - // Fire callbacks - fire = function() { - - // Enforce single-firing - locked = locked || options.once; - - // Execute callbacks for all pending executions, - // respecting firingIndex overrides and runtime changes - fired = firing = true; - for ( ; queue.length; firingIndex = -1 ) { - memory = queue.shift(); - while ( ++firingIndex < list.length ) { - - // Run callback and check for early termination - if ( list[ firingIndex ].apply( memory[ 0 ], memory[ 1 ] ) === false && - options.stopOnFalse ) { - - // Jump to end and forget the data so .add doesn't re-fire - firingIndex = list.length; - memory = false; - } - } - } - - // Forget the data if we're done with it - if ( !options.memory ) { - memory = false; - } - - firing = false; - - // Clean up if we're done firing for good - if ( locked ) { - - // Keep an empty list if we have data for future add calls - if ( memory ) { - list = []; - - // Otherwise, this object is spent - } else { - list = ""; - } - } - }, - - // Actual Callbacks object - self = { - - // Add a callback or a collection of callbacks to the list - add: function() { - if ( list ) { - - // If we have memory from a past run, we should fire after adding - if ( memory && !firing ) { - firingIndex = list.length - 1; - queue.push( memory ); - } - - ( function add( args ) { - jQuery.each( args, function( _, arg ) { - if ( isFunction( arg ) ) { - if ( !options.unique || !self.has( arg ) ) { - list.push( arg ); - } - } else if ( arg && arg.length && toType( arg ) !== "string" ) { - - // Inspect recursively - add( arg ); - } - } ); - } )( arguments ); - - if ( memory && !firing ) { - fire(); - } - } - return this; - }, - - // Remove a callback from the list - remove: function() { - jQuery.each( arguments, function( _, arg ) { - var index; - while ( ( index = jQuery.inArray( arg, list, index ) ) > -1 ) { - list.splice( index, 1 ); - - // Handle firing indexes - if ( index <= firingIndex ) { - firingIndex--; - } - } - } ); - return this; - }, - - // Check if a given callback is in the list. - // If no argument is given, return whether or not list has callbacks attached. - has: function( fn ) { - return fn ? - jQuery.inArray( fn, list ) > -1 : - list.length > 0; - }, - - // Remove all callbacks from the list - empty: function() { - if ( list ) { - list = []; - } - return this; - }, - - // Disable .fire and .add - // Abort any current/pending executions - // Clear all callbacks and values - disable: function() { - locked = queue = []; - list = memory = ""; - return this; - }, - disabled: function() { - return !list; - }, - - // Disable .fire - // Also disable .add unless we have memory (since it would have no effect) - // Abort any pending executions - lock: function() { - locked = queue = []; - if ( !memory && !firing ) { - list = memory = ""; - } - return this; - }, - locked: function() { - return !!locked; - }, - - // Call all callbacks with the given context and arguments - fireWith: function( context, args ) { - if ( !locked ) { - args = args || []; - args = [ context, args.slice ? 
args.slice() : args ]; - queue.push( args ); - if ( !firing ) { - fire(); - } - } - return this; - }, - - // Call all the callbacks with the given arguments - fire: function() { - self.fireWith( this, arguments ); - return this; - }, - - // To know if the callbacks have already been called at least once - fired: function() { - return !!fired; - } - }; - - return self; -}; - - -function Identity( v ) { - return v; -} -function Thrower( ex ) { - throw ex; -} - -function adoptValue( value, resolve, reject, noValue ) { - var method; - - try { - - // Check for promise aspect first to privilege synchronous behavior - if ( value && isFunction( ( method = value.promise ) ) ) { - method.call( value ).done( resolve ).fail( reject ); - - // Other thenables - } else if ( value && isFunction( ( method = value.then ) ) ) { - method.call( value, resolve, reject ); - - // Other non-thenables - } else { - - // Control `resolve` arguments by letting Array#slice cast boolean `noValue` to integer: - // * false: [ value ].slice( 0 ) => resolve( value ) - // * true: [ value ].slice( 1 ) => resolve() - resolve.apply( undefined, [ value ].slice( noValue ) ); - } - - // For Promises/A+, convert exceptions into rejections - // Since jQuery.when doesn't unwrap thenables, we can skip the extra checks appearing in - // Deferred#then to conditionally suppress rejection. - } catch ( value ) { - - // Support: Android 4.0 only - // Strict mode functions invoked without .call/.apply get global-object context - reject.apply( undefined, [ value ] ); - } -} - -jQuery.extend( { - - Deferred: function( func ) { - var tuples = [ - - // action, add listener, callbacks, - // ... .then handlers, argument index, [final state] - [ "notify", "progress", jQuery.Callbacks( "memory" ), - jQuery.Callbacks( "memory" ), 2 ], - [ "resolve", "done", jQuery.Callbacks( "once memory" ), - jQuery.Callbacks( "once memory" ), 0, "resolved" ], - [ "reject", "fail", jQuery.Callbacks( "once memory" ), - jQuery.Callbacks( "once memory" ), 1, "rejected" ] - ], - state = "pending", - promise = { - state: function() { - return state; - }, - always: function() { - deferred.done( arguments ).fail( arguments ); - return this; - }, - "catch": function( fn ) { - return promise.then( null, fn ); - }, - - // Keep pipe for back-compat - pipe: function( /* fnDone, fnFail, fnProgress */ ) { - var fns = arguments; - - return jQuery.Deferred( function( newDefer ) { - jQuery.each( tuples, function( _i, tuple ) { - - // Map tuples (progress, done, fail) to arguments (done, fail, progress) - var fn = isFunction( fns[ tuple[ 4 ] ] ) && fns[ tuple[ 4 ] ]; - - // deferred.progress(function() { bind to newDefer or newDefer.notify }) - // deferred.done(function() { bind to newDefer or newDefer.resolve }) - // deferred.fail(function() { bind to newDefer or newDefer.reject }) - deferred[ tuple[ 1 ] ]( function() { - var returned = fn && fn.apply( this, arguments ); - if ( returned && isFunction( returned.promise ) ) { - returned.promise() - .progress( newDefer.notify ) - .done( newDefer.resolve ) - .fail( newDefer.reject ); - } else { - newDefer[ tuple[ 0 ] + "With" ]( - this, - fn ? 
[ returned ] : arguments - ); - } - } ); - } ); - fns = null; - } ).promise(); - }, - then: function( onFulfilled, onRejected, onProgress ) { - var maxDepth = 0; - function resolve( depth, deferred, handler, special ) { - return function() { - var that = this, - args = arguments, - mightThrow = function() { - var returned, then; - - // Support: Promises/A+ section 2.3.3.3.3 - // https://promisesaplus.com/#point-59 - // Ignore double-resolution attempts - if ( depth < maxDepth ) { - return; - } - - returned = handler.apply( that, args ); - - // Support: Promises/A+ section 2.3.1 - // https://promisesaplus.com/#point-48 - if ( returned === deferred.promise() ) { - throw new TypeError( "Thenable self-resolution" ); - } - - // Support: Promises/A+ sections 2.3.3.1, 3.5 - // https://promisesaplus.com/#point-54 - // https://promisesaplus.com/#point-75 - // Retrieve `then` only once - then = returned && - - // Support: Promises/A+ section 2.3.4 - // https://promisesaplus.com/#point-64 - // Only check objects and functions for thenability - ( typeof returned === "object" || - typeof returned === "function" ) && - returned.then; - - // Handle a returned thenable - if ( isFunction( then ) ) { - - // Special processors (notify) just wait for resolution - if ( special ) { - then.call( - returned, - resolve( maxDepth, deferred, Identity, special ), - resolve( maxDepth, deferred, Thrower, special ) - ); - - // Normal processors (resolve) also hook into progress - } else { - - // ...and disregard older resolution values - maxDepth++; - - then.call( - returned, - resolve( maxDepth, deferred, Identity, special ), - resolve( maxDepth, deferred, Thrower, special ), - resolve( maxDepth, deferred, Identity, - deferred.notifyWith ) - ); - } - - // Handle all other returned values - } else { - - // Only substitute handlers pass on context - // and multiple values (non-spec behavior) - if ( handler !== Identity ) { - that = undefined; - args = [ returned ]; - } - - // Process the value(s) - // Default process is resolve - ( special || deferred.resolveWith )( that, args ); - } - }, - - // Only normal processors (resolve) catch and reject exceptions - process = special ? - mightThrow : - function() { - try { - mightThrow(); - } catch ( e ) { - - if ( jQuery.Deferred.exceptionHook ) { - jQuery.Deferred.exceptionHook( e, - process.stackTrace ); - } - - // Support: Promises/A+ section 2.3.3.3.4.1 - // https://promisesaplus.com/#point-61 - // Ignore post-resolution exceptions - if ( depth + 1 >= maxDepth ) { - - // Only substitute handlers pass on context - // and multiple values (non-spec behavior) - if ( handler !== Thrower ) { - that = undefined; - args = [ e ]; - } - - deferred.rejectWith( that, args ); - } - } - }; - - // Support: Promises/A+ section 2.3.3.3.1 - // https://promisesaplus.com/#point-57 - // Re-resolve promises immediately to dodge false rejection from - // subsequent errors - if ( depth ) { - process(); - } else { - - // Call an optional hook to record the stack, in case of exception - // since it's otherwise lost when execution goes async - if ( jQuery.Deferred.getStackHook ) { - process.stackTrace = jQuery.Deferred.getStackHook(); - } - window.setTimeout( process ); - } - }; - } - - return jQuery.Deferred( function( newDefer ) { - - // progress_handlers.add( ... ) - tuples[ 0 ][ 3 ].add( - resolve( - 0, - newDefer, - isFunction( onProgress ) ? - onProgress : - Identity, - newDefer.notifyWith - ) - ); - - // fulfilled_handlers.add( ... 
) - tuples[ 1 ][ 3 ].add( - resolve( - 0, - newDefer, - isFunction( onFulfilled ) ? - onFulfilled : - Identity - ) - ); - - // rejected_handlers.add( ... ) - tuples[ 2 ][ 3 ].add( - resolve( - 0, - newDefer, - isFunction( onRejected ) ? - onRejected : - Thrower - ) - ); - } ).promise(); - }, - - // Get a promise for this deferred - // If obj is provided, the promise aspect is added to the object - promise: function( obj ) { - return obj != null ? jQuery.extend( obj, promise ) : promise; - } - }, - deferred = {}; - - // Add list-specific methods - jQuery.each( tuples, function( i, tuple ) { - var list = tuple[ 2 ], - stateString = tuple[ 5 ]; - - // promise.progress = list.add - // promise.done = list.add - // promise.fail = list.add - promise[ tuple[ 1 ] ] = list.add; - - // Handle state - if ( stateString ) { - list.add( - function() { - - // state = "resolved" (i.e., fulfilled) - // state = "rejected" - state = stateString; - }, - - // rejected_callbacks.disable - // fulfilled_callbacks.disable - tuples[ 3 - i ][ 2 ].disable, - - // rejected_handlers.disable - // fulfilled_handlers.disable - tuples[ 3 - i ][ 3 ].disable, - - // progress_callbacks.lock - tuples[ 0 ][ 2 ].lock, - - // progress_handlers.lock - tuples[ 0 ][ 3 ].lock - ); - } - - // progress_handlers.fire - // fulfilled_handlers.fire - // rejected_handlers.fire - list.add( tuple[ 3 ].fire ); - - // deferred.notify = function() { deferred.notifyWith(...) } - // deferred.resolve = function() { deferred.resolveWith(...) } - // deferred.reject = function() { deferred.rejectWith(...) } - deferred[ tuple[ 0 ] ] = function() { - deferred[ tuple[ 0 ] + "With" ]( this === deferred ? undefined : this, arguments ); - return this; - }; - - // deferred.notifyWith = list.fireWith - // deferred.resolveWith = list.fireWith - // deferred.rejectWith = list.fireWith - deferred[ tuple[ 0 ] + "With" ] = list.fireWith; - } ); - - // Make the deferred a promise - promise.promise( deferred ); - - // Call given func if any - if ( func ) { - func.call( deferred, deferred ); - } - - // All done! - return deferred; - }, - - // Deferred helper - when: function( singleValue ) { - var - - // count of uncompleted subordinates - remaining = arguments.length, - - // count of unprocessed arguments - i = remaining, - - // subordinate fulfillment data - resolveContexts = Array( i ), - resolveValues = slice.call( arguments ), - - // the master Deferred - master = jQuery.Deferred(), - - // subordinate callback factory - updateFunc = function( i ) { - return function( value ) { - resolveContexts[ i ] = this; - resolveValues[ i ] = arguments.length > 1 ? slice.call( arguments ) : value; - if ( !( --remaining ) ) { - master.resolveWith( resolveContexts, resolveValues ); - } - }; - }; - - // Single- and empty arguments are adopted like Promise.resolve - if ( remaining <= 1 ) { - adoptValue( singleValue, master.done( updateFunc( i ) ).resolve, master.reject, - !remaining ); - - // Use .then() to unwrap secondary thenables (cf. gh-3000) - if ( master.state() === "pending" || - isFunction( resolveValues[ i ] && resolveValues[ i ].then ) ) { - - return master.then(); - } - } - - // Multiple arguments are aggregated like Promise.all array elements - while ( i-- ) { - adoptValue( resolveValues[ i ], updateFunc( i ), master.reject ); - } - - return master.promise(); - } -} ); - - -// These usually indicate a programmer mistake during development, -// warn about them ASAP rather than swallowing them by default. 
-var rerrorNames = /^(Eval|Internal|Range|Reference|Syntax|Type|URI)Error$/; - -jQuery.Deferred.exceptionHook = function( error, stack ) { - - // Support: IE 8 - 9 only - // Console exists when dev tools are open, which can happen at any time - if ( window.console && window.console.warn && error && rerrorNames.test( error.name ) ) { - window.console.warn( "jQuery.Deferred exception: " + error.message, error.stack, stack ); - } -}; - - - - -jQuery.readyException = function( error ) { - window.setTimeout( function() { - throw error; - } ); -}; - - - - -// The deferred used on DOM ready -var readyList = jQuery.Deferred(); - -jQuery.fn.ready = function( fn ) { - - readyList - .then( fn ) - - // Wrap jQuery.readyException in a function so that the lookup - // happens at the time of error handling instead of callback - // registration. - .catch( function( error ) { - jQuery.readyException( error ); - } ); - - return this; -}; - -jQuery.extend( { - - // Is the DOM ready to be used? Set to true once it occurs. - isReady: false, - - // A counter to track how many items to wait for before - // the ready event fires. See #6781 - readyWait: 1, - - // Handle when the DOM is ready - ready: function( wait ) { - - // Abort if there are pending holds or we're already ready - if ( wait === true ? --jQuery.readyWait : jQuery.isReady ) { - return; - } - - // Remember that the DOM is ready - jQuery.isReady = true; - - // If a normal DOM Ready event fired, decrement, and wait if need be - if ( wait !== true && --jQuery.readyWait > 0 ) { - return; - } - - // If there are functions bound, to execute - readyList.resolveWith( document, [ jQuery ] ); - } -} ); - -jQuery.ready.then = readyList.then; - -// The ready event handler and self cleanup method -function completed() { - document.removeEventListener( "DOMContentLoaded", completed ); - window.removeEventListener( "load", completed ); - jQuery.ready(); -} - -// Catch cases where $(document).ready() is called -// after the browser event has already occurred. -// Support: IE <=9 - 10 only -// Older IE sometimes signals "interactive" too soon -if ( document.readyState === "complete" || - ( document.readyState !== "loading" && !document.documentElement.doScroll ) ) { - - // Handle it asynchronously to allow scripts the opportunity to delay ready - window.setTimeout( jQuery.ready ); - -} else { - - // Use the handy event callback - document.addEventListener( "DOMContentLoaded", completed ); - - // A fallback to window.onload, that will always work - window.addEventListener( "load", completed ); -} - - - - -// Multifunctional method to get and set values of a collection -// The value/s can optionally be executed if it's a function -var access = function( elems, fn, key, value, chainable, emptyGet, raw ) { - var i = 0, - len = elems.length, - bulk = key == null; - - // Sets many values - if ( toType( key ) === "object" ) { - chainable = true; - for ( i in key ) { - access( elems, fn, i, key[ i ], true, emptyGet, raw ); - } - - // Sets one value - } else if ( value !== undefined ) { - chainable = true; - - if ( !isFunction( value ) ) { - raw = true; - } - - if ( bulk ) { - - // Bulk operations run against the entire set - if ( raw ) { - fn.call( elems, value ); - fn = null; - - // ...except when executing function values - } else { - bulk = fn; - fn = function( elem, _key, value ) { - return bulk.call( jQuery( elem ), value ); - }; - } - } - - if ( fn ) { - for ( ; i < len; i++ ) { - fn( - elems[ i ], key, raw ? 
- value : - value.call( elems[ i ], i, fn( elems[ i ], key ) ) - ); - } - } - } - - if ( chainable ) { - return elems; - } - - // Gets - if ( bulk ) { - return fn.call( elems ); - } - - return len ? fn( elems[ 0 ], key ) : emptyGet; -}; - - -// Matches dashed string for camelizing -var rmsPrefix = /^-ms-/, - rdashAlpha = /-([a-z])/g; - -// Used by camelCase as callback to replace() -function fcamelCase( _all, letter ) { - return letter.toUpperCase(); -} - -// Convert dashed to camelCase; used by the css and data modules -// Support: IE <=9 - 11, Edge 12 - 15 -// Microsoft forgot to hump their vendor prefix (#9572) -function camelCase( string ) { - return string.replace( rmsPrefix, "ms-" ).replace( rdashAlpha, fcamelCase ); -} -var acceptData = function( owner ) { - - // Accepts only: - // - Node - // - Node.ELEMENT_NODE - // - Node.DOCUMENT_NODE - // - Object - // - Any - return owner.nodeType === 1 || owner.nodeType === 9 || !( +owner.nodeType ); -}; - - - - -function Data() { - this.expando = jQuery.expando + Data.uid++; -} - -Data.uid = 1; - -Data.prototype = { - - cache: function( owner ) { - - // Check if the owner object already has a cache - var value = owner[ this.expando ]; - - // If not, create one - if ( !value ) { - value = {}; - - // We can accept data for non-element nodes in modern browsers, - // but we should not, see #8335. - // Always return an empty object. - if ( acceptData( owner ) ) { - - // If it is a node unlikely to be stringify-ed or looped over - // use plain assignment - if ( owner.nodeType ) { - owner[ this.expando ] = value; - - // Otherwise secure it in a non-enumerable property - // configurable must be true to allow the property to be - // deleted when data is removed - } else { - Object.defineProperty( owner, this.expando, { - value: value, - configurable: true - } ); - } - } - } - - return value; - }, - set: function( owner, data, value ) { - var prop, - cache = this.cache( owner ); - - // Handle: [ owner, key, value ] args - // Always use camelCase key (gh-2257) - if ( typeof data === "string" ) { - cache[ camelCase( data ) ] = value; - - // Handle: [ owner, { properties } ] args - } else { - - // Copy the properties one-by-one to the cache object - for ( prop in data ) { - cache[ camelCase( prop ) ] = data[ prop ]; - } - } - return cache; - }, - get: function( owner, key ) { - return key === undefined ? - this.cache( owner ) : - - // Always use camelCase key (gh-2257) - owner[ this.expando ] && owner[ this.expando ][ camelCase( key ) ]; - }, - access: function( owner, key, value ) { - - // In cases where either: - // - // 1. No key was specified - // 2. A string key was specified, but no value provided - // - // Take the "read" path and allow the get method to determine - // which value to return, respectively either: - // - // 1. The entire cache object - // 2. The data stored at the key - // - if ( key === undefined || - ( ( key && typeof key === "string" ) && value === undefined ) ) { - - return this.get( owner, key ); - } - - // When the key is not a string, or both a key and value - // are specified, set or extend (existing objects) with either: - // - // 1. An object of properties - // 2. A key and value - // - this.set( owner, key, value ); - - // Since the "set" path can have two possible entry points - // return the expected data based on which path was taken[*] - return value !== undefined ? 
value : key; - }, - remove: function( owner, key ) { - var i, - cache = owner[ this.expando ]; - - if ( cache === undefined ) { - return; - } - - if ( key !== undefined ) { - - // Support array or space separated string of keys - if ( Array.isArray( key ) ) { - - // If key is an array of keys... - // We always set camelCase keys, so remove that. - key = key.map( camelCase ); - } else { - key = camelCase( key ); - - // If a key with the spaces exists, use it. - // Otherwise, create an array by matching non-whitespace - key = key in cache ? - [ key ] : - ( key.match( rnothtmlwhite ) || [] ); - } - - i = key.length; - - while ( i-- ) { - delete cache[ key[ i ] ]; - } - } - - // Remove the expando if there's no more data - if ( key === undefined || jQuery.isEmptyObject( cache ) ) { - - // Support: Chrome <=35 - 45 - // Webkit & Blink performance suffers when deleting properties - // from DOM nodes, so set to undefined instead - // https://bugs.chromium.org/p/chromium/issues/detail?id=378607 (bug restricted) - if ( owner.nodeType ) { - owner[ this.expando ] = undefined; - } else { - delete owner[ this.expando ]; - } - } - }, - hasData: function( owner ) { - var cache = owner[ this.expando ]; - return cache !== undefined && !jQuery.isEmptyObject( cache ); - } -}; -var dataPriv = new Data(); - -var dataUser = new Data(); - - - -// Implementation Summary -// -// 1. Enforce API surface and semantic compatibility with 1.9.x branch -// 2. Improve the module's maintainability by reducing the storage -// paths to a single mechanism. -// 3. Use the same single mechanism to support "private" and "user" data. -// 4. _Never_ expose "private" data to user code (TODO: Drop _data, _removeData) -// 5. Avoid exposing implementation details on user objects (eg. expando properties) -// 6. Provide a clear path for implementation upgrade to WeakMap in 2014 - -var rbrace = /^(?:\{[\w\W]*\}|\[[\w\W]*\])$/, - rmultiDash = /[A-Z]/g; - -function getData( data ) { - if ( data === "true" ) { - return true; - } - - if ( data === "false" ) { - return false; - } - - if ( data === "null" ) { - return null; - } - - // Only convert to a number if it doesn't change the string - if ( data === +data + "" ) { - return +data; - } - - if ( rbrace.test( data ) ) { - return JSON.parse( data ); - } - - return data; -} - -function dataAttr( elem, key, data ) { - var name; - - // If nothing was found internally, try to fetch any - // data from the HTML5 data-* attribute - if ( data === undefined && elem.nodeType === 1 ) { - name = "data-" + key.replace( rmultiDash, "-$&" ).toLowerCase(); - data = elem.getAttribute( name ); - - if ( typeof data === "string" ) { - try { - data = getData( data ); - } catch ( e ) {} - - // Make sure we set the data so it isn't changed later - dataUser.set( elem, key, data ); - } else { - data = undefined; - } - } - return data; -} - -jQuery.extend( { - hasData: function( elem ) { - return dataUser.hasData( elem ) || dataPriv.hasData( elem ); - }, - - data: function( elem, name, data ) { - return dataUser.access( elem, name, data ); - }, - - removeData: function( elem, name ) { - dataUser.remove( elem, name ); - }, - - // TODO: Now that all calls to _data and _removeData have been replaced - // with direct calls to dataPriv methods, these can be deprecated. 
- _data: function( elem, name, data ) { - return dataPriv.access( elem, name, data ); - }, - - _removeData: function( elem, name ) { - dataPriv.remove( elem, name ); - } -} ); - -jQuery.fn.extend( { - data: function( key, value ) { - var i, name, data, - elem = this[ 0 ], - attrs = elem && elem.attributes; - - // Gets all values - if ( key === undefined ) { - if ( this.length ) { - data = dataUser.get( elem ); - - if ( elem.nodeType === 1 && !dataPriv.get( elem, "hasDataAttrs" ) ) { - i = attrs.length; - while ( i-- ) { - - // Support: IE 11 only - // The attrs elements can be null (#14894) - if ( attrs[ i ] ) { - name = attrs[ i ].name; - if ( name.indexOf( "data-" ) === 0 ) { - name = camelCase( name.slice( 5 ) ); - dataAttr( elem, name, data[ name ] ); - } - } - } - dataPriv.set( elem, "hasDataAttrs", true ); - } - } - - return data; - } - - // Sets multiple values - if ( typeof key === "object" ) { - return this.each( function() { - dataUser.set( this, key ); - } ); - } - - return access( this, function( value ) { - var data; - - // The calling jQuery object (element matches) is not empty - // (and therefore has an element appears at this[ 0 ]) and the - // `value` parameter was not undefined. An empty jQuery object - // will result in `undefined` for elem = this[ 0 ] which will - // throw an exception if an attempt to read a data cache is made. - if ( elem && value === undefined ) { - - // Attempt to get data from the cache - // The key will always be camelCased in Data - data = dataUser.get( elem, key ); - if ( data !== undefined ) { - return data; - } - - // Attempt to "discover" the data in - // HTML5 custom data-* attrs - data = dataAttr( elem, key ); - if ( data !== undefined ) { - return data; - } - - // We tried really hard, but the data doesn't exist. - return; - } - - // Set the data... 
- this.each( function() { - - // We always store the camelCased key - dataUser.set( this, key, value ); - } ); - }, null, value, arguments.length > 1, null, true ); - }, - - removeData: function( key ) { - return this.each( function() { - dataUser.remove( this, key ); - } ); - } -} ); - - -jQuery.extend( { - queue: function( elem, type, data ) { - var queue; - - if ( elem ) { - type = ( type || "fx" ) + "queue"; - queue = dataPriv.get( elem, type ); - - // Speed up dequeue by getting out quickly if this is just a lookup - if ( data ) { - if ( !queue || Array.isArray( data ) ) { - queue = dataPriv.access( elem, type, jQuery.makeArray( data ) ); - } else { - queue.push( data ); - } - } - return queue || []; - } - }, - - dequeue: function( elem, type ) { - type = type || "fx"; - - var queue = jQuery.queue( elem, type ), - startLength = queue.length, - fn = queue.shift(), - hooks = jQuery._queueHooks( elem, type ), - next = function() { - jQuery.dequeue( elem, type ); - }; - - // If the fx queue is dequeued, always remove the progress sentinel - if ( fn === "inprogress" ) { - fn = queue.shift(); - startLength--; - } - - if ( fn ) { - - // Add a progress sentinel to prevent the fx queue from being - // automatically dequeued - if ( type === "fx" ) { - queue.unshift( "inprogress" ); - } - - // Clear up the last queue stop function - delete hooks.stop; - fn.call( elem, next, hooks ); - } - - if ( !startLength && hooks ) { - hooks.empty.fire(); - } - }, - - // Not public - generate a queueHooks object, or return the current one - _queueHooks: function( elem, type ) { - var key = type + "queueHooks"; - return dataPriv.get( elem, key ) || dataPriv.access( elem, key, { - empty: jQuery.Callbacks( "once memory" ).add( function() { - dataPriv.remove( elem, [ type + "queue", key ] ); - } ) - } ); - } -} ); - -jQuery.fn.extend( { - queue: function( type, data ) { - var setter = 2; - - if ( typeof type !== "string" ) { - data = type; - type = "fx"; - setter--; - } - - if ( arguments.length < setter ) { - return jQuery.queue( this[ 0 ], type ); - } - - return data === undefined ? 
- this : - this.each( function() { - var queue = jQuery.queue( this, type, data ); - - // Ensure a hooks for this queue - jQuery._queueHooks( this, type ); - - if ( type === "fx" && queue[ 0 ] !== "inprogress" ) { - jQuery.dequeue( this, type ); - } - } ); - }, - dequeue: function( type ) { - return this.each( function() { - jQuery.dequeue( this, type ); - } ); - }, - clearQueue: function( type ) { - return this.queue( type || "fx", [] ); - }, - - // Get a promise resolved when queues of a certain type - // are emptied (fx is the type by default) - promise: function( type, obj ) { - var tmp, - count = 1, - defer = jQuery.Deferred(), - elements = this, - i = this.length, - resolve = function() { - if ( !( --count ) ) { - defer.resolveWith( elements, [ elements ] ); - } - }; - - if ( typeof type !== "string" ) { - obj = type; - type = undefined; - } - type = type || "fx"; - - while ( i-- ) { - tmp = dataPriv.get( elements[ i ], type + "queueHooks" ); - if ( tmp && tmp.empty ) { - count++; - tmp.empty.add( resolve ); - } - } - resolve(); - return defer.promise( obj ); - } -} ); -var pnum = ( /[+-]?(?:\d*\.|)\d+(?:[eE][+-]?\d+|)/ ).source; - -var rcssNum = new RegExp( "^(?:([+-])=|)(" + pnum + ")([a-z%]*)$", "i" ); - - -var cssExpand = [ "Top", "Right", "Bottom", "Left" ]; - -var documentElement = document.documentElement; - - - - var isAttached = function( elem ) { - return jQuery.contains( elem.ownerDocument, elem ); - }, - composed = { composed: true }; - - // Support: IE 9 - 11+, Edge 12 - 18+, iOS 10.0 - 10.2 only - // Check attachment across shadow DOM boundaries when possible (gh-3504) - // Support: iOS 10.0-10.2 only - // Early iOS 10 versions support `attachShadow` but not `getRootNode`, - // leading to errors. We need to check for `getRootNode`. - if ( documentElement.getRootNode ) { - isAttached = function( elem ) { - return jQuery.contains( elem.ownerDocument, elem ) || - elem.getRootNode( composed ) === elem.ownerDocument; - }; - } -var isHiddenWithinTree = function( elem, el ) { - - // isHiddenWithinTree might be called from jQuery#filter function; - // in that case, element will be second argument - elem = el || elem; - - // Inline style trumps all - return elem.style.display === "none" || - elem.style.display === "" && - - // Otherwise, check computed style - // Support: Firefox <=43 - 45 - // Disconnected elements can have computed display: none, so first confirm that elem is - // in the document. - isAttached( elem ) && - - jQuery.css( elem, "display" ) === "none"; - }; - - - -function adjustCSS( elem, prop, valueParts, tween ) { - var adjusted, scale, - maxIterations = 20, - currentValue = tween ? - function() { - return tween.cur(); - } : - function() { - return jQuery.css( elem, prop, "" ); - }, - initial = currentValue(), - unit = valueParts && valueParts[ 3 ] || ( jQuery.cssNumber[ prop ] ? 
"" : "px" ), - - // Starting value computation is required for potential unit mismatches - initialInUnit = elem.nodeType && - ( jQuery.cssNumber[ prop ] || unit !== "px" && +initial ) && - rcssNum.exec( jQuery.css( elem, prop ) ); - - if ( initialInUnit && initialInUnit[ 3 ] !== unit ) { - - // Support: Firefox <=54 - // Halve the iteration target value to prevent interference from CSS upper bounds (gh-2144) - initial = initial / 2; - - // Trust units reported by jQuery.css - unit = unit || initialInUnit[ 3 ]; - - // Iteratively approximate from a nonzero starting point - initialInUnit = +initial || 1; - - while ( maxIterations-- ) { - - // Evaluate and update our best guess (doubling guesses that zero out). - // Finish if the scale equals or crosses 1 (making the old*new product non-positive). - jQuery.style( elem, prop, initialInUnit + unit ); - if ( ( 1 - scale ) * ( 1 - ( scale = currentValue() / initial || 0.5 ) ) <= 0 ) { - maxIterations = 0; - } - initialInUnit = initialInUnit / scale; - - } - - initialInUnit = initialInUnit * 2; - jQuery.style( elem, prop, initialInUnit + unit ); - - // Make sure we update the tween properties later on - valueParts = valueParts || []; - } - - if ( valueParts ) { - initialInUnit = +initialInUnit || +initial || 0; - - // Apply relative offset (+=/-=) if specified - adjusted = valueParts[ 1 ] ? - initialInUnit + ( valueParts[ 1 ] + 1 ) * valueParts[ 2 ] : - +valueParts[ 2 ]; - if ( tween ) { - tween.unit = unit; - tween.start = initialInUnit; - tween.end = adjusted; - } - } - return adjusted; -} - - -var defaultDisplayMap = {}; - -function getDefaultDisplay( elem ) { - var temp, - doc = elem.ownerDocument, - nodeName = elem.nodeName, - display = defaultDisplayMap[ nodeName ]; - - if ( display ) { - return display; - } - - temp = doc.body.appendChild( doc.createElement( nodeName ) ); - display = jQuery.css( temp, "display" ); - - temp.parentNode.removeChild( temp ); - - if ( display === "none" ) { - display = "block"; - } - defaultDisplayMap[ nodeName ] = display; - - return display; -} - -function showHide( elements, show ) { - var display, elem, - values = [], - index = 0, - length = elements.length; - - // Determine new display value for elements that need to change - for ( ; index < length; index++ ) { - elem = elements[ index ]; - if ( !elem.style ) { - continue; - } - - display = elem.style.display; - if ( show ) { - - // Since we force visibility upon cascade-hidden elements, an immediate (and slow) - // check is required in this first loop unless we have a nonempty display value (either - // inline or about-to-be-restored) - if ( display === "none" ) { - values[ index ] = dataPriv.get( elem, "display" ) || null; - if ( !values[ index ] ) { - elem.style.display = ""; - } - } - if ( elem.style.display === "" && isHiddenWithinTree( elem ) ) { - values[ index ] = getDefaultDisplay( elem ); - } - } else { - if ( display !== "none" ) { - values[ index ] = "none"; - - // Remember what we're overwriting - dataPriv.set( elem, "display", display ); - } - } - } - - // Set the display of the elements in a second loop to avoid constant reflow - for ( index = 0; index < length; index++ ) { - if ( values[ index ] != null ) { - elements[ index ].style.display = values[ index ]; - } - } - - return elements; -} - -jQuery.fn.extend( { - show: function() { - return showHide( this, true ); - }, - hide: function() { - return showHide( this ); - }, - toggle: function( state ) { - if ( typeof state === "boolean" ) { - return state ? 
this.show() : this.hide(); - } - - return this.each( function() { - if ( isHiddenWithinTree( this ) ) { - jQuery( this ).show(); - } else { - jQuery( this ).hide(); - } - } ); - } -} ); -var rcheckableType = ( /^(?:checkbox|radio)$/i ); - -var rtagName = ( /<([a-z][^\/\0>\x20\t\r\n\f]*)/i ); - -var rscriptType = ( /^$|^module$|\/(?:java|ecma)script/i ); - - - -( function() { - var fragment = document.createDocumentFragment(), - div = fragment.appendChild( document.createElement( "div" ) ), - input = document.createElement( "input" ); - - // Support: Android 4.0 - 4.3 only - // Check state lost if the name is set (#11217) - // Support: Windows Web Apps (WWA) - // `name` and `type` must use .setAttribute for WWA (#14901) - input.setAttribute( "type", "radio" ); - input.setAttribute( "checked", "checked" ); - input.setAttribute( "name", "t" ); - - div.appendChild( input ); - - // Support: Android <=4.1 only - // Older WebKit doesn't clone checked state correctly in fragments - support.checkClone = div.cloneNode( true ).cloneNode( true ).lastChild.checked; - - // Support: IE <=11 only - // Make sure textarea (and checkbox) defaultValue is properly cloned - div.innerHTML = ""; - support.noCloneChecked = !!div.cloneNode( true ).lastChild.defaultValue; - - // Support: IE <=9 only - // IE <=9 replaces "; - support.option = !!div.lastChild; -} )(); - - -// We have to close these tags to support XHTML (#13200) -var wrapMap = { - - // XHTML parsers do not magically insert elements in the - // same way that tag soup parsers do. So we cannot shorten - // this by omitting or other required elements. - thead: [ 1, "", "
" ], - col: [ 2, "", "
" ], - tr: [ 2, "", "
" ], - td: [ 3, "", "
" ], - - _default: [ 0, "", "" ] -}; - -wrapMap.tbody = wrapMap.tfoot = wrapMap.colgroup = wrapMap.caption = wrapMap.thead; -wrapMap.th = wrapMap.td; - -// Support: IE <=9 only -if ( !support.option ) { - wrapMap.optgroup = wrapMap.option = [ 1, "" ]; -} - - -function getAll( context, tag ) { - - // Support: IE <=9 - 11 only - // Use typeof to avoid zero-argument method invocation on host objects (#15151) - var ret; - - if ( typeof context.getElementsByTagName !== "undefined" ) { - ret = context.getElementsByTagName( tag || "*" ); - - } else if ( typeof context.querySelectorAll !== "undefined" ) { - ret = context.querySelectorAll( tag || "*" ); - - } else { - ret = []; - } - - if ( tag === undefined || tag && nodeName( context, tag ) ) { - return jQuery.merge( [ context ], ret ); - } - - return ret; -} - - -// Mark scripts as having already been evaluated -function setGlobalEval( elems, refElements ) { - var i = 0, - l = elems.length; - - for ( ; i < l; i++ ) { - dataPriv.set( - elems[ i ], - "globalEval", - !refElements || dataPriv.get( refElements[ i ], "globalEval" ) - ); - } -} - - -var rhtml = /<|&#?\w+;/; - -function buildFragment( elems, context, scripts, selection, ignored ) { - var elem, tmp, tag, wrap, attached, j, - fragment = context.createDocumentFragment(), - nodes = [], - i = 0, - l = elems.length; - - for ( ; i < l; i++ ) { - elem = elems[ i ]; - - if ( elem || elem === 0 ) { - - // Add nodes directly - if ( toType( elem ) === "object" ) { - - // Support: Android <=4.0 only, PhantomJS 1 only - // push.apply(_, arraylike) throws on ancient WebKit - jQuery.merge( nodes, elem.nodeType ? [ elem ] : elem ); - - // Convert non-html into a text node - } else if ( !rhtml.test( elem ) ) { - nodes.push( context.createTextNode( elem ) ); - - // Convert html into DOM nodes - } else { - tmp = tmp || fragment.appendChild( context.createElement( "div" ) ); - - // Deserialize a standard representation - tag = ( rtagName.exec( elem ) || [ "", "" ] )[ 1 ].toLowerCase(); - wrap = wrapMap[ tag ] || wrapMap._default; - tmp.innerHTML = wrap[ 1 ] + jQuery.htmlPrefilter( elem ) + wrap[ 2 ]; - - // Descend through wrappers to the right content - j = wrap[ 0 ]; - while ( j-- ) { - tmp = tmp.lastChild; - } - - // Support: Android <=4.0 only, PhantomJS 1 only - // push.apply(_, arraylike) throws on ancient WebKit - jQuery.merge( nodes, tmp.childNodes ); - - // Remember the top-level container - tmp = fragment.firstChild; - - // Ensure the created nodes are orphaned (#12392) - tmp.textContent = ""; - } - } - } - - // Remove wrapper from fragment - fragment.textContent = ""; - - i = 0; - while ( ( elem = nodes[ i++ ] ) ) { - - // Skip elements already in the context collection (trac-4087) - if ( selection && jQuery.inArray( elem, selection ) > -1 ) { - if ( ignored ) { - ignored.push( elem ); - } - continue; - } - - attached = isAttached( elem ); - - // Append to fragment - tmp = getAll( fragment.appendChild( elem ), "script" ); - - // Preserve script evaluation history - if ( attached ) { - setGlobalEval( tmp ); - } - - // Capture executables - if ( scripts ) { - j = 0; - while ( ( elem = tmp[ j++ ] ) ) { - if ( rscriptType.test( elem.type || "" ) ) { - scripts.push( elem ); - } - } - } - } - - return fragment; -} - - -var - rkeyEvent = /^key/, - rmouseEvent = /^(?:mouse|pointer|contextmenu|drag|drop)|click/, - rtypenamespace = /^([^.]*)(?:\.(.+)|)/; - -function returnTrue() { - return true; -} - -function returnFalse() { - return false; -} - -// Support: IE <=9 - 11+ -// focus() and blur() are 
asynchronous, except when they are no-op. -// So expect focus to be synchronous when the element is already active, -// and blur to be synchronous when the element is not already active. -// (focus and blur are always synchronous in other supported browsers, -// this just defines when we can count on it). -function expectSync( elem, type ) { - return ( elem === safeActiveElement() ) === ( type === "focus" ); -} - -// Support: IE <=9 only -// Accessing document.activeElement can throw unexpectedly -// https://bugs.jquery.com/ticket/13393 -function safeActiveElement() { - try { - return document.activeElement; - } catch ( err ) { } -} - -function on( elem, types, selector, data, fn, one ) { - var origFn, type; - - // Types can be a map of types/handlers - if ( typeof types === "object" ) { - - // ( types-Object, selector, data ) - if ( typeof selector !== "string" ) { - - // ( types-Object, data ) - data = data || selector; - selector = undefined; - } - for ( type in types ) { - on( elem, type, selector, data, types[ type ], one ); - } - return elem; - } - - if ( data == null && fn == null ) { - - // ( types, fn ) - fn = selector; - data = selector = undefined; - } else if ( fn == null ) { - if ( typeof selector === "string" ) { - - // ( types, selector, fn ) - fn = data; - data = undefined; - } else { - - // ( types, data, fn ) - fn = data; - data = selector; - selector = undefined; - } - } - if ( fn === false ) { - fn = returnFalse; - } else if ( !fn ) { - return elem; - } - - if ( one === 1 ) { - origFn = fn; - fn = function( event ) { - - // Can use an empty set, since event contains the info - jQuery().off( event ); - return origFn.apply( this, arguments ); - }; - - // Use same guid so caller can remove using origFn - fn.guid = origFn.guid || ( origFn.guid = jQuery.guid++ ); - } - return elem.each( function() { - jQuery.event.add( this, types, fn, data, selector ); - } ); -} - -/* - * Helper functions for managing events -- not part of the public interface. - * Props to Dean Edwards' addEvent library for many of the ideas. - */ -jQuery.event = { - - global: {}, - - add: function( elem, types, handler, data, selector ) { - - var handleObjIn, eventHandle, tmp, - events, t, handleObj, - special, handlers, type, namespaces, origType, - elemData = dataPriv.get( elem ); - - // Only attach events to objects that accept data - if ( !acceptData( elem ) ) { - return; - } - - // Caller can pass in an object of custom data in lieu of the handler - if ( handler.handler ) { - handleObjIn = handler; - handler = handleObjIn.handler; - selector = handleObjIn.selector; - } - - // Ensure that invalid selectors throw exceptions at attach time - // Evaluate against documentElement in case elem is a non-element node (e.g., document) - if ( selector ) { - jQuery.find.matchesSelector( documentElement, selector ); - } - - // Make sure that the handler has a unique ID, used to find/remove it later - if ( !handler.guid ) { - handler.guid = jQuery.guid++; - } - - // Init the element's event structure and main handler, if this is the first - if ( !( events = elemData.events ) ) { - events = elemData.events = Object.create( null ); - } - if ( !( eventHandle = elemData.handle ) ) { - eventHandle = elemData.handle = function( e ) { - - // Discard the second event of a jQuery.event.trigger() and - // when an event is called after a page has unloaded - return typeof jQuery !== "undefined" && jQuery.event.triggered !== e.type ? 
- jQuery.event.dispatch.apply( elem, arguments ) : undefined; - }; - } - - // Handle multiple events separated by a space - types = ( types || "" ).match( rnothtmlwhite ) || [ "" ]; - t = types.length; - while ( t-- ) { - tmp = rtypenamespace.exec( types[ t ] ) || []; - type = origType = tmp[ 1 ]; - namespaces = ( tmp[ 2 ] || "" ).split( "." ).sort(); - - // There *must* be a type, no attaching namespace-only handlers - if ( !type ) { - continue; - } - - // If event changes its type, use the special event handlers for the changed type - special = jQuery.event.special[ type ] || {}; - - // If selector defined, determine special event api type, otherwise given type - type = ( selector ? special.delegateType : special.bindType ) || type; - - // Update special based on newly reset type - special = jQuery.event.special[ type ] || {}; - - // handleObj is passed to all event handlers - handleObj = jQuery.extend( { - type: type, - origType: origType, - data: data, - handler: handler, - guid: handler.guid, - selector: selector, - needsContext: selector && jQuery.expr.match.needsContext.test( selector ), - namespace: namespaces.join( "." ) - }, handleObjIn ); - - // Init the event handler queue if we're the first - if ( !( handlers = events[ type ] ) ) { - handlers = events[ type ] = []; - handlers.delegateCount = 0; - - // Only use addEventListener if the special events handler returns false - if ( !special.setup || - special.setup.call( elem, data, namespaces, eventHandle ) === false ) { - - if ( elem.addEventListener ) { - elem.addEventListener( type, eventHandle ); - } - } - } - - if ( special.add ) { - special.add.call( elem, handleObj ); - - if ( !handleObj.handler.guid ) { - handleObj.handler.guid = handler.guid; - } - } - - // Add to the element's handler list, delegates in front - if ( selector ) { - handlers.splice( handlers.delegateCount++, 0, handleObj ); - } else { - handlers.push( handleObj ); - } - - // Keep track of which events have ever been used, for event optimization - jQuery.event.global[ type ] = true; - } - - }, - - // Detach an event or set of events from an element - remove: function( elem, types, handler, selector, mappedTypes ) { - - var j, origCount, tmp, - events, t, handleObj, - special, handlers, type, namespaces, origType, - elemData = dataPriv.hasData( elem ) && dataPriv.get( elem ); - - if ( !elemData || !( events = elemData.events ) ) { - return; - } - - // Once for each type.namespace in types; type may be omitted - types = ( types || "" ).match( rnothtmlwhite ) || [ "" ]; - t = types.length; - while ( t-- ) { - tmp = rtypenamespace.exec( types[ t ] ) || []; - type = origType = tmp[ 1 ]; - namespaces = ( tmp[ 2 ] || "" ).split( "." ).sort(); - - // Unbind all events (on this namespace, if provided) for the element - if ( !type ) { - for ( type in events ) { - jQuery.event.remove( elem, type + types[ t ], handler, selector, true ); - } - continue; - } - - special = jQuery.event.special[ type ] || {}; - type = ( selector ? 
special.delegateType : special.bindType ) || type; - handlers = events[ type ] || []; - tmp = tmp[ 2 ] && - new RegExp( "(^|\\.)" + namespaces.join( "\\.(?:.*\\.|)" ) + "(\\.|$)" ); - - // Remove matching events - origCount = j = handlers.length; - while ( j-- ) { - handleObj = handlers[ j ]; - - if ( ( mappedTypes || origType === handleObj.origType ) && - ( !handler || handler.guid === handleObj.guid ) && - ( !tmp || tmp.test( handleObj.namespace ) ) && - ( !selector || selector === handleObj.selector || - selector === "**" && handleObj.selector ) ) { - handlers.splice( j, 1 ); - - if ( handleObj.selector ) { - handlers.delegateCount--; - } - if ( special.remove ) { - special.remove.call( elem, handleObj ); - } - } - } - - // Remove generic event handler if we removed something and no more handlers exist - // (avoids potential for endless recursion during removal of special event handlers) - if ( origCount && !handlers.length ) { - if ( !special.teardown || - special.teardown.call( elem, namespaces, elemData.handle ) === false ) { - - jQuery.removeEvent( elem, type, elemData.handle ); - } - - delete events[ type ]; - } - } - - // Remove data and the expando if it's no longer used - if ( jQuery.isEmptyObject( events ) ) { - dataPriv.remove( elem, "handle events" ); - } - }, - - dispatch: function( nativeEvent ) { - - var i, j, ret, matched, handleObj, handlerQueue, - args = new Array( arguments.length ), - - // Make a writable jQuery.Event from the native event object - event = jQuery.event.fix( nativeEvent ), - - handlers = ( - dataPriv.get( this, "events" ) || Object.create( null ) - )[ event.type ] || [], - special = jQuery.event.special[ event.type ] || {}; - - // Use the fix-ed jQuery.Event rather than the (read-only) native event - args[ 0 ] = event; - - for ( i = 1; i < arguments.length; i++ ) { - args[ i ] = arguments[ i ]; - } - - event.delegateTarget = this; - - // Call the preDispatch hook for the mapped type, and let it bail if desired - if ( special.preDispatch && special.preDispatch.call( this, event ) === false ) { - return; - } - - // Determine handlers - handlerQueue = jQuery.event.handlers.call( this, event, handlers ); - - // Run delegates first; they may want to stop propagation beneath us - i = 0; - while ( ( matched = handlerQueue[ i++ ] ) && !event.isPropagationStopped() ) { - event.currentTarget = matched.elem; - - j = 0; - while ( ( handleObj = matched.handlers[ j++ ] ) && - !event.isImmediatePropagationStopped() ) { - - // If the event is namespaced, then each handler is only invoked if it is - // specially universal or its namespaces are a superset of the event's. 
- if ( !event.rnamespace || handleObj.namespace === false || - event.rnamespace.test( handleObj.namespace ) ) { - - event.handleObj = handleObj; - event.data = handleObj.data; - - ret = ( ( jQuery.event.special[ handleObj.origType ] || {} ).handle || - handleObj.handler ).apply( matched.elem, args ); - - if ( ret !== undefined ) { - if ( ( event.result = ret ) === false ) { - event.preventDefault(); - event.stopPropagation(); - } - } - } - } - } - - // Call the postDispatch hook for the mapped type - if ( special.postDispatch ) { - special.postDispatch.call( this, event ); - } - - return event.result; - }, - - handlers: function( event, handlers ) { - var i, handleObj, sel, matchedHandlers, matchedSelectors, - handlerQueue = [], - delegateCount = handlers.delegateCount, - cur = event.target; - - // Find delegate handlers - if ( delegateCount && - - // Support: IE <=9 - // Black-hole SVG instance trees (trac-13180) - cur.nodeType && - - // Support: Firefox <=42 - // Suppress spec-violating clicks indicating a non-primary pointer button (trac-3861) - // https://www.w3.org/TR/DOM-Level-3-Events/#event-type-click - // Support: IE 11 only - // ...but not arrow key "clicks" of radio inputs, which can have `button` -1 (gh-2343) - !( event.type === "click" && event.button >= 1 ) ) { - - for ( ; cur !== this; cur = cur.parentNode || this ) { - - // Don't check non-elements (#13208) - // Don't process clicks on disabled elements (#6911, #8165, #11382, #11764) - if ( cur.nodeType === 1 && !( event.type === "click" && cur.disabled === true ) ) { - matchedHandlers = []; - matchedSelectors = {}; - for ( i = 0; i < delegateCount; i++ ) { - handleObj = handlers[ i ]; - - // Don't conflict with Object.prototype properties (#13203) - sel = handleObj.selector + " "; - - if ( matchedSelectors[ sel ] === undefined ) { - matchedSelectors[ sel ] = handleObj.needsContext ? - jQuery( sel, this ).index( cur ) > -1 : - jQuery.find( sel, this, null, [ cur ] ).length; - } - if ( matchedSelectors[ sel ] ) { - matchedHandlers.push( handleObj ); - } - } - if ( matchedHandlers.length ) { - handlerQueue.push( { elem: cur, handlers: matchedHandlers } ); - } - } - } - } - - // Add the remaining (directly-bound) handlers - cur = this; - if ( delegateCount < handlers.length ) { - handlerQueue.push( { elem: cur, handlers: handlers.slice( delegateCount ) } ); - } - - return handlerQueue; - }, - - addProp: function( name, hook ) { - Object.defineProperty( jQuery.Event.prototype, name, { - enumerable: true, - configurable: true, - - get: isFunction( hook ) ? - function() { - if ( this.originalEvent ) { - return hook( this.originalEvent ); - } - } : - function() { - if ( this.originalEvent ) { - return this.originalEvent[ name ]; - } - }, - - set: function( value ) { - Object.defineProperty( this, name, { - enumerable: true, - configurable: true, - writable: true, - value: value - } ); - } - } ); - }, - - fix: function( originalEvent ) { - return originalEvent[ jQuery.expando ] ? - originalEvent : - new jQuery.Event( originalEvent ); - }, - - special: { - load: { - - // Prevent triggered image.load events from bubbling to window.load - noBubble: true - }, - click: { - - // Utilize native event to ensure correct state for checkable inputs - setup: function( data ) { - - // For mutual compressibility with _default, replace `this` access with a local var. - // `|| data` is dead code meant only to preserve the variable through minification. 
- var el = this || data; - - // Claim the first handler - if ( rcheckableType.test( el.type ) && - el.click && nodeName( el, "input" ) ) { - - // dataPriv.set( el, "click", ... ) - leverageNative( el, "click", returnTrue ); - } - - // Return false to allow normal processing in the caller - return false; - }, - trigger: function( data ) { - - // For mutual compressibility with _default, replace `this` access with a local var. - // `|| data` is dead code meant only to preserve the variable through minification. - var el = this || data; - - // Force setup before triggering a click - if ( rcheckableType.test( el.type ) && - el.click && nodeName( el, "input" ) ) { - - leverageNative( el, "click" ); - } - - // Return non-false to allow normal event-path propagation - return true; - }, - - // For cross-browser consistency, suppress native .click() on links - // Also prevent it if we're currently inside a leveraged native-event stack - _default: function( event ) { - var target = event.target; - return rcheckableType.test( target.type ) && - target.click && nodeName( target, "input" ) && - dataPriv.get( target, "click" ) || - nodeName( target, "a" ); - } - }, - - beforeunload: { - postDispatch: function( event ) { - - // Support: Firefox 20+ - // Firefox doesn't alert if the returnValue field is not set. - if ( event.result !== undefined && event.originalEvent ) { - event.originalEvent.returnValue = event.result; - } - } - } - } -}; - -// Ensure the presence of an event listener that handles manually-triggered -// synthetic events by interrupting progress until reinvoked in response to -// *native* events that it fires directly, ensuring that state changes have -// already occurred before other listeners are invoked. -function leverageNative( el, type, expectSync ) { - - // Missing expectSync indicates a trigger call, which must force setup through jQuery.event.add - if ( !expectSync ) { - if ( dataPriv.get( el, type ) === undefined ) { - jQuery.event.add( el, type, returnTrue ); - } - return; - } - - // Register the controller as a special universal handler for all event namespaces - dataPriv.set( el, type, false ); - jQuery.event.add( el, type, { - namespace: false, - handler: function( event ) { - var notAsync, result, - saved = dataPriv.get( this, type ); - - if ( ( event.isTrigger & 1 ) && this[ type ] ) { - - // Interrupt processing of the outer synthetic .trigger()ed event - // Saved data should be false in such cases, but might be a leftover capture object - // from an async native handler (gh-4350) - if ( !saved.length ) { - - // Store arguments for use when handling the inner native event - // There will always be at least one argument (an event object), so this array - // will not be confused with a leftover capture object. 
- saved = slice.call( arguments ); - dataPriv.set( this, type, saved ); - - // Trigger the native event and capture its result - // Support: IE <=9 - 11+ - // focus() and blur() are asynchronous - notAsync = expectSync( this, type ); - this[ type ](); - result = dataPriv.get( this, type ); - if ( saved !== result || notAsync ) { - dataPriv.set( this, type, false ); - } else { - result = {}; - } - if ( saved !== result ) { - - // Cancel the outer synthetic event - event.stopImmediatePropagation(); - event.preventDefault(); - return result.value; - } - - // If this is an inner synthetic event for an event with a bubbling surrogate - // (focus or blur), assume that the surrogate already propagated from triggering the - // native event and prevent that from happening again here. - // This technically gets the ordering wrong w.r.t. to `.trigger()` (in which the - // bubbling surrogate propagates *after* the non-bubbling base), but that seems - // less bad than duplication. - } else if ( ( jQuery.event.special[ type ] || {} ).delegateType ) { - event.stopPropagation(); - } - - // If this is a native event triggered above, everything is now in order - // Fire an inner synthetic event with the original arguments - } else if ( saved.length ) { - - // ...and capture the result - dataPriv.set( this, type, { - value: jQuery.event.trigger( - - // Support: IE <=9 - 11+ - // Extend with the prototype to reset the above stopImmediatePropagation() - jQuery.extend( saved[ 0 ], jQuery.Event.prototype ), - saved.slice( 1 ), - this - ) - } ); - - // Abort handling of the native event - event.stopImmediatePropagation(); - } - } - } ); -} - -jQuery.removeEvent = function( elem, type, handle ) { - - // This "if" is needed for plain objects - if ( elem.removeEventListener ) { - elem.removeEventListener( type, handle ); - } -}; - -jQuery.Event = function( src, props ) { - - // Allow instantiation without the 'new' keyword - if ( !( this instanceof jQuery.Event ) ) { - return new jQuery.Event( src, props ); - } - - // Event object - if ( src && src.type ) { - this.originalEvent = src; - this.type = src.type; - - // Events bubbling up the document may have been marked as prevented - // by a handler lower down the tree; reflect the correct value. - this.isDefaultPrevented = src.defaultPrevented || - src.defaultPrevented === undefined && - - // Support: Android <=2.3 only - src.returnValue === false ? - returnTrue : - returnFalse; - - // Create target properties - // Support: Safari <=6 - 7 only - // Target should not be a text node (#504, #13143) - this.target = ( src.target && src.target.nodeType === 3 ) ? 
- src.target.parentNode : - src.target; - - this.currentTarget = src.currentTarget; - this.relatedTarget = src.relatedTarget; - - // Event type - } else { - this.type = src; - } - - // Put explicitly provided properties onto the event object - if ( props ) { - jQuery.extend( this, props ); - } - - // Create a timestamp if incoming event doesn't have one - this.timeStamp = src && src.timeStamp || Date.now(); - - // Mark it as fixed - this[ jQuery.expando ] = true; -}; - -// jQuery.Event is based on DOM3 Events as specified by the ECMAScript Language Binding -// https://www.w3.org/TR/2003/WD-DOM-Level-3-Events-20030331/ecma-script-binding.html -jQuery.Event.prototype = { - constructor: jQuery.Event, - isDefaultPrevented: returnFalse, - isPropagationStopped: returnFalse, - isImmediatePropagationStopped: returnFalse, - isSimulated: false, - - preventDefault: function() { - var e = this.originalEvent; - - this.isDefaultPrevented = returnTrue; - - if ( e && !this.isSimulated ) { - e.preventDefault(); - } - }, - stopPropagation: function() { - var e = this.originalEvent; - - this.isPropagationStopped = returnTrue; - - if ( e && !this.isSimulated ) { - e.stopPropagation(); - } - }, - stopImmediatePropagation: function() { - var e = this.originalEvent; - - this.isImmediatePropagationStopped = returnTrue; - - if ( e && !this.isSimulated ) { - e.stopImmediatePropagation(); - } - - this.stopPropagation(); - } -}; - -// Includes all common event props including KeyEvent and MouseEvent specific props -jQuery.each( { - altKey: true, - bubbles: true, - cancelable: true, - changedTouches: true, - ctrlKey: true, - detail: true, - eventPhase: true, - metaKey: true, - pageX: true, - pageY: true, - shiftKey: true, - view: true, - "char": true, - code: true, - charCode: true, - key: true, - keyCode: true, - button: true, - buttons: true, - clientX: true, - clientY: true, - offsetX: true, - offsetY: true, - pointerId: true, - pointerType: true, - screenX: true, - screenY: true, - targetTouches: true, - toElement: true, - touches: true, - - which: function( event ) { - var button = event.button; - - // Add which for key events - if ( event.which == null && rkeyEvent.test( event.type ) ) { - return event.charCode != null ? event.charCode : event.keyCode; - } - - // Add which for click: 1 === left; 2 === middle; 3 === right - if ( !event.which && button !== undefined && rmouseEvent.test( event.type ) ) { - if ( button & 1 ) { - return 1; - } - - if ( button & 2 ) { - return 3; - } - - if ( button & 4 ) { - return 2; - } - - return 0; - } - - return event.which; - } -}, jQuery.event.addProp ); - -jQuery.each( { focus: "focusin", blur: "focusout" }, function( type, delegateType ) { - jQuery.event.special[ type ] = { - - // Utilize native event if possible so blur/focus sequence is correct - setup: function() { - - // Claim the first handler - // dataPriv.set( this, "focus", ... ) - // dataPriv.set( this, "blur", ... ) - leverageNative( this, type, expectSync ); - - // Return false to allow normal processing in the caller - return false; - }, - trigger: function() { - - // Force setup before trigger - leverageNative( this, type ); - - // Return non-false to allow normal event-path propagation - return true; - }, - - delegateType: delegateType - }; -} ); - -// Create mouseenter/leave events using mouseover/out and event-time checks -// so that event delegation works in jQuery. 
-// Do the same for pointerenter/pointerleave and pointerover/pointerout -// -// Support: Safari 7 only -// Safari sends mouseenter too often; see: -// https://bugs.chromium.org/p/chromium/issues/detail?id=470258 -// for the description of the bug (it existed in older Chrome versions as well). -jQuery.each( { - mouseenter: "mouseover", - mouseleave: "mouseout", - pointerenter: "pointerover", - pointerleave: "pointerout" -}, function( orig, fix ) { - jQuery.event.special[ orig ] = { - delegateType: fix, - bindType: fix, - - handle: function( event ) { - var ret, - target = this, - related = event.relatedTarget, - handleObj = event.handleObj; - - // For mouseenter/leave call the handler if related is outside the target. - // NB: No relatedTarget if the mouse left/entered the browser window - if ( !related || ( related !== target && !jQuery.contains( target, related ) ) ) { - event.type = handleObj.origType; - ret = handleObj.handler.apply( this, arguments ); - event.type = fix; - } - return ret; - } - }; -} ); - -jQuery.fn.extend( { - - on: function( types, selector, data, fn ) { - return on( this, types, selector, data, fn ); - }, - one: function( types, selector, data, fn ) { - return on( this, types, selector, data, fn, 1 ); - }, - off: function( types, selector, fn ) { - var handleObj, type; - if ( types && types.preventDefault && types.handleObj ) { - - // ( event ) dispatched jQuery.Event - handleObj = types.handleObj; - jQuery( types.delegateTarget ).off( - handleObj.namespace ? - handleObj.origType + "." + handleObj.namespace : - handleObj.origType, - handleObj.selector, - handleObj.handler - ); - return this; - } - if ( typeof types === "object" ) { - - // ( types-object [, selector] ) - for ( type in types ) { - this.off( type, selector, types[ type ] ); - } - return this; - } - if ( selector === false || typeof selector === "function" ) { - - // ( types [, fn] ) - fn = selector; - selector = undefined; - } - if ( fn === false ) { - fn = returnFalse; - } - return this.each( function() { - jQuery.event.remove( this, types, fn, selector ); - } ); - } -} ); - - -var - - // Support: IE <=10 - 11, Edge 12 - 13 only - // In IE/Edge using regex groups here causes severe slowdowns. - // See https://connect.microsoft.com/IE/feedback/details/1736512/ - rnoInnerhtml = /\s*$/g; - -// Prefer a tbody over its parent table for containing new rows -function manipulationTarget( elem, content ) { - if ( nodeName( elem, "table" ) && - nodeName( content.nodeType !== 11 ? content : content.firstChild, "tr" ) ) { - - return jQuery( elem ).children( "tbody" )[ 0 ] || elem; - } - - return elem; -} - -// Replace/restore the type attribute of script elements for safe DOM manipulation -function disableScript( elem ) { - elem.type = ( elem.getAttribute( "type" ) !== null ) + "/" + elem.type; - return elem; -} -function restoreScript( elem ) { - if ( ( elem.type || "" ).slice( 0, 5 ) === "true/" ) { - elem.type = elem.type.slice( 5 ); - } else { - elem.removeAttribute( "type" ); - } - - return elem; -} - -function cloneCopyEvent( src, dest ) { - var i, l, type, pdataOld, udataOld, udataCur, events; - - if ( dest.nodeType !== 1 ) { - return; - } - - // 1. Copy private data: events, handlers, etc. 
- if ( dataPriv.hasData( src ) ) { - pdataOld = dataPriv.get( src ); - events = pdataOld.events; - - if ( events ) { - dataPriv.remove( dest, "handle events" ); - - for ( type in events ) { - for ( i = 0, l = events[ type ].length; i < l; i++ ) { - jQuery.event.add( dest, type, events[ type ][ i ] ); - } - } - } - } - - // 2. Copy user data - if ( dataUser.hasData( src ) ) { - udataOld = dataUser.access( src ); - udataCur = jQuery.extend( {}, udataOld ); - - dataUser.set( dest, udataCur ); - } -} - -// Fix IE bugs, see support tests -function fixInput( src, dest ) { - var nodeName = dest.nodeName.toLowerCase(); - - // Fails to persist the checked state of a cloned checkbox or radio button. - if ( nodeName === "input" && rcheckableType.test( src.type ) ) { - dest.checked = src.checked; - - // Fails to return the selected option to the default selected state when cloning options - } else if ( nodeName === "input" || nodeName === "textarea" ) { - dest.defaultValue = src.defaultValue; - } -} - -function domManip( collection, args, callback, ignored ) { - - // Flatten any nested arrays - args = flat( args ); - - var fragment, first, scripts, hasScripts, node, doc, - i = 0, - l = collection.length, - iNoClone = l - 1, - value = args[ 0 ], - valueIsFunction = isFunction( value ); - - // We can't cloneNode fragments that contain checked, in WebKit - if ( valueIsFunction || - ( l > 1 && typeof value === "string" && - !support.checkClone && rchecked.test( value ) ) ) { - return collection.each( function( index ) { - var self = collection.eq( index ); - if ( valueIsFunction ) { - args[ 0 ] = value.call( this, index, self.html() ); - } - domManip( self, args, callback, ignored ); - } ); - } - - if ( l ) { - fragment = buildFragment( args, collection[ 0 ].ownerDocument, false, collection, ignored ); - first = fragment.firstChild; - - if ( fragment.childNodes.length === 1 ) { - fragment = first; - } - - // Require either new content or an interest in ignored elements to invoke the callback - if ( first || ignored ) { - scripts = jQuery.map( getAll( fragment, "script" ), disableScript ); - hasScripts = scripts.length; - - // Use the original fragment for the last item - // instead of the first because it can end up - // being emptied incorrectly in certain situations (#8070). 
- for ( ; i < l; i++ ) { - node = fragment; - - if ( i !== iNoClone ) { - node = jQuery.clone( node, true, true ); - - // Keep references to cloned scripts for later restoration - if ( hasScripts ) { - - // Support: Android <=4.0 only, PhantomJS 1 only - // push.apply(_, arraylike) throws on ancient WebKit - jQuery.merge( scripts, getAll( node, "script" ) ); - } - } - - callback.call( collection[ i ], node, i ); - } - - if ( hasScripts ) { - doc = scripts[ scripts.length - 1 ].ownerDocument; - - // Reenable scripts - jQuery.map( scripts, restoreScript ); - - // Evaluate executable scripts on first document insertion - for ( i = 0; i < hasScripts; i++ ) { - node = scripts[ i ]; - if ( rscriptType.test( node.type || "" ) && - !dataPriv.access( node, "globalEval" ) && - jQuery.contains( doc, node ) ) { - - if ( node.src && ( node.type || "" ).toLowerCase() !== "module" ) { - - // Optional AJAX dependency, but won't run scripts if not present - if ( jQuery._evalUrl && !node.noModule ) { - jQuery._evalUrl( node.src, { - nonce: node.nonce || node.getAttribute( "nonce" ) - }, doc ); - } - } else { - DOMEval( node.textContent.replace( rcleanScript, "" ), node, doc ); - } - } - } - } - } - } - - return collection; -} - -function remove( elem, selector, keepData ) { - var node, - nodes = selector ? jQuery.filter( selector, elem ) : elem, - i = 0; - - for ( ; ( node = nodes[ i ] ) != null; i++ ) { - if ( !keepData && node.nodeType === 1 ) { - jQuery.cleanData( getAll( node ) ); - } - - if ( node.parentNode ) { - if ( keepData && isAttached( node ) ) { - setGlobalEval( getAll( node, "script" ) ); - } - node.parentNode.removeChild( node ); - } - } - - return elem; -} - -jQuery.extend( { - htmlPrefilter: function( html ) { - return html; - }, - - clone: function( elem, dataAndEvents, deepDataAndEvents ) { - var i, l, srcElements, destElements, - clone = elem.cloneNode( true ), - inPage = isAttached( elem ); - - // Fix IE cloning issues - if ( !support.noCloneChecked && ( elem.nodeType === 1 || elem.nodeType === 11 ) && - !jQuery.isXMLDoc( elem ) ) { - - // We eschew Sizzle here for performance reasons: https://jsperf.com/getall-vs-sizzle/2 - destElements = getAll( clone ); - srcElements = getAll( elem ); - - for ( i = 0, l = srcElements.length; i < l; i++ ) { - fixInput( srcElements[ i ], destElements[ i ] ); - } - } - - // Copy the events from the original to the clone - if ( dataAndEvents ) { - if ( deepDataAndEvents ) { - srcElements = srcElements || getAll( elem ); - destElements = destElements || getAll( clone ); - - for ( i = 0, l = srcElements.length; i < l; i++ ) { - cloneCopyEvent( srcElements[ i ], destElements[ i ] ); - } - } else { - cloneCopyEvent( elem, clone ); - } - } - - // Preserve script evaluation history - destElements = getAll( clone, "script" ); - if ( destElements.length > 0 ) { - setGlobalEval( destElements, !inPage && getAll( elem, "script" ) ); - } - - // Return the cloned set - return clone; - }, - - cleanData: function( elems ) { - var data, elem, type, - special = jQuery.event.special, - i = 0; - - for ( ; ( elem = elems[ i ] ) !== undefined; i++ ) { - if ( acceptData( elem ) ) { - if ( ( data = elem[ dataPriv.expando ] ) ) { - if ( data.events ) { - for ( type in data.events ) { - if ( special[ type ] ) { - jQuery.event.remove( elem, type ); - - // This is a shortcut to avoid jQuery.event.remove's overhead - } else { - jQuery.removeEvent( elem, type, data.handle ); - } - } - } - - // Support: Chrome <=35 - 45+ - // Assign undefined instead of using delete, see Data#remove 
- elem[ dataPriv.expando ] = undefined; - } - if ( elem[ dataUser.expando ] ) { - - // Support: Chrome <=35 - 45+ - // Assign undefined instead of using delete, see Data#remove - elem[ dataUser.expando ] = undefined; - } - } - } - } -} ); - -jQuery.fn.extend( { - detach: function( selector ) { - return remove( this, selector, true ); - }, - - remove: function( selector ) { - return remove( this, selector ); - }, - - text: function( value ) { - return access( this, function( value ) { - return value === undefined ? - jQuery.text( this ) : - this.empty().each( function() { - if ( this.nodeType === 1 || this.nodeType === 11 || this.nodeType === 9 ) { - this.textContent = value; - } - } ); - }, null, value, arguments.length ); - }, - - append: function() { - return domManip( this, arguments, function( elem ) { - if ( this.nodeType === 1 || this.nodeType === 11 || this.nodeType === 9 ) { - var target = manipulationTarget( this, elem ); - target.appendChild( elem ); - } - } ); - }, - - prepend: function() { - return domManip( this, arguments, function( elem ) { - if ( this.nodeType === 1 || this.nodeType === 11 || this.nodeType === 9 ) { - var target = manipulationTarget( this, elem ); - target.insertBefore( elem, target.firstChild ); - } - } ); - }, - - before: function() { - return domManip( this, arguments, function( elem ) { - if ( this.parentNode ) { - this.parentNode.insertBefore( elem, this ); - } - } ); - }, - - after: function() { - return domManip( this, arguments, function( elem ) { - if ( this.parentNode ) { - this.parentNode.insertBefore( elem, this.nextSibling ); - } - } ); - }, - - empty: function() { - var elem, - i = 0; - - for ( ; ( elem = this[ i ] ) != null; i++ ) { - if ( elem.nodeType === 1 ) { - - // Prevent memory leaks - jQuery.cleanData( getAll( elem, false ) ); - - // Remove any remaining nodes - elem.textContent = ""; - } - } - - return this; - }, - - clone: function( dataAndEvents, deepDataAndEvents ) { - dataAndEvents = dataAndEvents == null ? false : dataAndEvents; - deepDataAndEvents = deepDataAndEvents == null ? 
dataAndEvents : deepDataAndEvents; - - return this.map( function() { - return jQuery.clone( this, dataAndEvents, deepDataAndEvents ); - } ); - }, - - html: function( value ) { - return access( this, function( value ) { - var elem = this[ 0 ] || {}, - i = 0, - l = this.length; - - if ( value === undefined && elem.nodeType === 1 ) { - return elem.innerHTML; - } - - // See if we can take a shortcut and just use innerHTML - if ( typeof value === "string" && !rnoInnerhtml.test( value ) && - !wrapMap[ ( rtagName.exec( value ) || [ "", "" ] )[ 1 ].toLowerCase() ] ) { - - value = jQuery.htmlPrefilter( value ); - - try { - for ( ; i < l; i++ ) { - elem = this[ i ] || {}; - - // Remove element nodes and prevent memory leaks - if ( elem.nodeType === 1 ) { - jQuery.cleanData( getAll( elem, false ) ); - elem.innerHTML = value; - } - } - - elem = 0; - - // If using innerHTML throws an exception, use the fallback method - } catch ( e ) {} - } - - if ( elem ) { - this.empty().append( value ); - } - }, null, value, arguments.length ); - }, - - replaceWith: function() { - var ignored = []; - - // Make the changes, replacing each non-ignored context element with the new content - return domManip( this, arguments, function( elem ) { - var parent = this.parentNode; - - if ( jQuery.inArray( this, ignored ) < 0 ) { - jQuery.cleanData( getAll( this ) ); - if ( parent ) { - parent.replaceChild( elem, this ); - } - } - - // Force callback invocation - }, ignored ); - } -} ); - -jQuery.each( { - appendTo: "append", - prependTo: "prepend", - insertBefore: "before", - insertAfter: "after", - replaceAll: "replaceWith" -}, function( name, original ) { - jQuery.fn[ name ] = function( selector ) { - var elems, - ret = [], - insert = jQuery( selector ), - last = insert.length - 1, - i = 0; - - for ( ; i <= last; i++ ) { - elems = i === last ? this : this.clone( true ); - jQuery( insert[ i ] )[ original ]( elems ); - - // Support: Android <=4.0 only, PhantomJS 1 only - // .get() because push.apply(_, arraylike) throws on ancient WebKit - push.apply( ret, elems.get() ); - } - - return this.pushStack( ret ); - }; -} ); -var rnumnonpx = new RegExp( "^(" + pnum + ")(?!px)[a-z%]+$", "i" ); - -var getStyles = function( elem ) { - - // Support: IE <=11 only, Firefox <=30 (#15098, #14150) - // IE throws on elements created in popups - // FF meanwhile throws on frame elements through "defaultView.getComputedStyle" - var view = elem.ownerDocument.defaultView; - - if ( !view || !view.opener ) { - view = window; - } - - return view.getComputedStyle( elem ); - }; - -var swap = function( elem, options, callback ) { - var ret, name, - old = {}; - - // Remember the old values, and insert the new ones - for ( name in options ) { - old[ name ] = elem.style[ name ]; - elem.style[ name ] = options[ name ]; - } - - ret = callback.call( elem ); - - // Revert the old values - for ( name in options ) { - elem.style[ name ] = old[ name ]; - } - - return ret; -}; - - -var rboxStyle = new RegExp( cssExpand.join( "|" ), "i" ); - - - -( function() { - - // Executing both pixelPosition & boxSizingReliable tests require only one layout - // so they're executed at the same time to save the second computation. 
- function computeStyleTests() { - - // This is a singleton, we need to execute it only once - if ( !div ) { - return; - } - - container.style.cssText = "position:absolute;left:-11111px;width:60px;" + - "margin-top:1px;padding:0;border:0"; - div.style.cssText = - "position:relative;display:block;box-sizing:border-box;overflow:scroll;" + - "margin:auto;border:1px;padding:1px;" + - "width:60%;top:1%"; - documentElement.appendChild( container ).appendChild( div ); - - var divStyle = window.getComputedStyle( div ); - pixelPositionVal = divStyle.top !== "1%"; - - // Support: Android 4.0 - 4.3 only, Firefox <=3 - 44 - reliableMarginLeftVal = roundPixelMeasures( divStyle.marginLeft ) === 12; - - // Support: Android 4.0 - 4.3 only, Safari <=9.1 - 10.1, iOS <=7.0 - 9.3 - // Some styles come back with percentage values, even though they shouldn't - div.style.right = "60%"; - pixelBoxStylesVal = roundPixelMeasures( divStyle.right ) === 36; - - // Support: IE 9 - 11 only - // Detect misreporting of content dimensions for box-sizing:border-box elements - boxSizingReliableVal = roundPixelMeasures( divStyle.width ) === 36; - - // Support: IE 9 only - // Detect overflow:scroll screwiness (gh-3699) - // Support: Chrome <=64 - // Don't get tricked when zoom affects offsetWidth (gh-4029) - div.style.position = "absolute"; - scrollboxSizeVal = roundPixelMeasures( div.offsetWidth / 3 ) === 12; - - documentElement.removeChild( container ); - - // Nullify the div so it wouldn't be stored in the memory and - // it will also be a sign that checks already performed - div = null; - } - - function roundPixelMeasures( measure ) { - return Math.round( parseFloat( measure ) ); - } - - var pixelPositionVal, boxSizingReliableVal, scrollboxSizeVal, pixelBoxStylesVal, - reliableTrDimensionsVal, reliableMarginLeftVal, - container = document.createElement( "div" ), - div = document.createElement( "div" ); - - // Finish early in limited (non-browser) environments - if ( !div.style ) { - return; - } - - // Support: IE <=9 - 11 only - // Style of cloned element affects source element cloned (#8908) - div.style.backgroundClip = "content-box"; - div.cloneNode( true ).style.backgroundClip = ""; - support.clearCloneStyle = div.style.backgroundClip === "content-box"; - - jQuery.extend( support, { - boxSizingReliable: function() { - computeStyleTests(); - return boxSizingReliableVal; - }, - pixelBoxStyles: function() { - computeStyleTests(); - return pixelBoxStylesVal; - }, - pixelPosition: function() { - computeStyleTests(); - return pixelPositionVal; - }, - reliableMarginLeft: function() { - computeStyleTests(); - return reliableMarginLeftVal; - }, - scrollboxSize: function() { - computeStyleTests(); - return scrollboxSizeVal; - }, - - // Support: IE 9 - 11+, Edge 15 - 18+ - // IE/Edge misreport `getComputedStyle` of table rows with width/height - // set in CSS while `offset*` properties report correct values. - // Behavior in IE 9 is more subtle than in newer versions & it passes - // some versions of this test; make sure not to make it pass there! 
- reliableTrDimensions: function() { - var table, tr, trChild, trStyle; - if ( reliableTrDimensionsVal == null ) { - table = document.createElement( "table" ); - tr = document.createElement( "tr" ); - trChild = document.createElement( "div" ); - - table.style.cssText = "position:absolute;left:-11111px"; - tr.style.height = "1px"; - trChild.style.height = "9px"; - - documentElement - .appendChild( table ) - .appendChild( tr ) - .appendChild( trChild ); - - trStyle = window.getComputedStyle( tr ); - reliableTrDimensionsVal = parseInt( trStyle.height ) > 3; - - documentElement.removeChild( table ); - } - return reliableTrDimensionsVal; - } - } ); -} )(); - - -function curCSS( elem, name, computed ) { - var width, minWidth, maxWidth, ret, - - // Support: Firefox 51+ - // Retrieving style before computed somehow - // fixes an issue with getting wrong values - // on detached elements - style = elem.style; - - computed = computed || getStyles( elem ); - - // getPropertyValue is needed for: - // .css('filter') (IE 9 only, #12537) - // .css('--customProperty) (#3144) - if ( computed ) { - ret = computed.getPropertyValue( name ) || computed[ name ]; - - if ( ret === "" && !isAttached( elem ) ) { - ret = jQuery.style( elem, name ); - } - - // A tribute to the "awesome hack by Dean Edwards" - // Android Browser returns percentage for some values, - // but width seems to be reliably pixels. - // This is against the CSSOM draft spec: - // https://drafts.csswg.org/cssom/#resolved-values - if ( !support.pixelBoxStyles() && rnumnonpx.test( ret ) && rboxStyle.test( name ) ) { - - // Remember the original values - width = style.width; - minWidth = style.minWidth; - maxWidth = style.maxWidth; - - // Put in the new values to get a computed value out - style.minWidth = style.maxWidth = style.width = ret; - ret = computed.width; - - // Revert the changed values - style.width = width; - style.minWidth = minWidth; - style.maxWidth = maxWidth; - } - } - - return ret !== undefined ? - - // Support: IE <=9 - 11 only - // IE returns zIndex value as an integer. - ret + "" : - ret; -} - - -function addGetHookIf( conditionFn, hookFn ) { - - // Define the hook, we'll check on the first run if it's really needed. - return { - get: function() { - if ( conditionFn() ) { - - // Hook not needed (or it's not possible to use it due - // to missing dependency), remove it. - delete this.get; - return; - } - - // Hook needed; redefine it so that the support test is not executed again. 
- return ( this.get = hookFn ).apply( this, arguments ); - } - }; -} - - -var cssPrefixes = [ "Webkit", "Moz", "ms" ], - emptyStyle = document.createElement( "div" ).style, - vendorProps = {}; - -// Return a vendor-prefixed property or undefined -function vendorPropName( name ) { - - // Check for vendor prefixed names - var capName = name[ 0 ].toUpperCase() + name.slice( 1 ), - i = cssPrefixes.length; - - while ( i-- ) { - name = cssPrefixes[ i ] + capName; - if ( name in emptyStyle ) { - return name; - } - } -} - -// Return a potentially-mapped jQuery.cssProps or vendor prefixed property -function finalPropName( name ) { - var final = jQuery.cssProps[ name ] || vendorProps[ name ]; - - if ( final ) { - return final; - } - if ( name in emptyStyle ) { - return name; - } - return vendorProps[ name ] = vendorPropName( name ) || name; -} - - -var - - // Swappable if display is none or starts with table - // except "table", "table-cell", or "table-caption" - // See here for display values: https://developer.mozilla.org/en-US/docs/CSS/display - rdisplayswap = /^(none|table(?!-c[ea]).+)/, - rcustomProp = /^--/, - cssShow = { position: "absolute", visibility: "hidden", display: "block" }, - cssNormalTransform = { - letterSpacing: "0", - fontWeight: "400" - }; - -function setPositiveNumber( _elem, value, subtract ) { - - // Any relative (+/-) values have already been - // normalized at this point - var matches = rcssNum.exec( value ); - return matches ? - - // Guard against undefined "subtract", e.g., when used as in cssHooks - Math.max( 0, matches[ 2 ] - ( subtract || 0 ) ) + ( matches[ 3 ] || "px" ) : - value; -} - -function boxModelAdjustment( elem, dimension, box, isBorderBox, styles, computedVal ) { - var i = dimension === "width" ? 1 : 0, - extra = 0, - delta = 0; - - // Adjustment may not be necessary - if ( box === ( isBorderBox ? 
"border" : "content" ) ) { - return 0; - } - - for ( ; i < 4; i += 2 ) { - - // Both box models exclude margin - if ( box === "margin" ) { - delta += jQuery.css( elem, box + cssExpand[ i ], true, styles ); - } - - // If we get here with a content-box, we're seeking "padding" or "border" or "margin" - if ( !isBorderBox ) { - - // Add padding - delta += jQuery.css( elem, "padding" + cssExpand[ i ], true, styles ); - - // For "border" or "margin", add border - if ( box !== "padding" ) { - delta += jQuery.css( elem, "border" + cssExpand[ i ] + "Width", true, styles ); - - // But still keep track of it otherwise - } else { - extra += jQuery.css( elem, "border" + cssExpand[ i ] + "Width", true, styles ); - } - - // If we get here with a border-box (content + padding + border), we're seeking "content" or - // "padding" or "margin" - } else { - - // For "content", subtract padding - if ( box === "content" ) { - delta -= jQuery.css( elem, "padding" + cssExpand[ i ], true, styles ); - } - - // For "content" or "padding", subtract border - if ( box !== "margin" ) { - delta -= jQuery.css( elem, "border" + cssExpand[ i ] + "Width", true, styles ); - } - } - } - - // Account for positive content-box scroll gutter when requested by providing computedVal - if ( !isBorderBox && computedVal >= 0 ) { - - // offsetWidth/offsetHeight is a rounded sum of content, padding, scroll gutter, and border - // Assuming integer scroll gutter, subtract the rest and round down - delta += Math.max( 0, Math.ceil( - elem[ "offset" + dimension[ 0 ].toUpperCase() + dimension.slice( 1 ) ] - - computedVal - - delta - - extra - - 0.5 - - // If offsetWidth/offsetHeight is unknown, then we can't determine content-box scroll gutter - // Use an explicit zero to avoid NaN (gh-3964) - ) ) || 0; - } - - return delta; -} - -function getWidthOrHeight( elem, dimension, extra ) { - - // Start with computed style - var styles = getStyles( elem ), - - // To avoid forcing a reflow, only fetch boxSizing if we need it (gh-4322). - // Fake content-box until we know it's needed to know the true value. - boxSizingNeeded = !support.boxSizingReliable() || extra, - isBorderBox = boxSizingNeeded && - jQuery.css( elem, "boxSizing", false, styles ) === "border-box", - valueIsBorderBox = isBorderBox, - - val = curCSS( elem, dimension, styles ), - offsetProp = "offset" + dimension[ 0 ].toUpperCase() + dimension.slice( 1 ); - - // Support: Firefox <=54 - // Return a confounding non-pixel value or feign ignorance, as appropriate. - if ( rnumnonpx.test( val ) ) { - if ( !extra ) { - return val; - } - val = "auto"; - } - - - // Support: IE 9 - 11 only - // Use offsetWidth/offsetHeight for when box sizing is unreliable. - // In those cases, the computed value can be trusted to be border-box. - if ( ( !support.boxSizingReliable() && isBorderBox || - - // Support: IE 10 - 11+, Edge 15 - 18+ - // IE/Edge misreport `getComputedStyle` of table rows with width/height - // set in CSS while `offset*` properties report correct values. - // Interestingly, in some cases IE 9 doesn't suffer from this issue. 
- !support.reliableTrDimensions() && nodeName( elem, "tr" ) || - - // Fall back to offsetWidth/offsetHeight when value is "auto" - // This happens for inline elements with no explicit setting (gh-3571) - val === "auto" || - - // Support: Android <=4.1 - 4.3 only - // Also use offsetWidth/offsetHeight for misreported inline dimensions (gh-3602) - !parseFloat( val ) && jQuery.css( elem, "display", false, styles ) === "inline" ) && - - // Make sure the element is visible & connected - elem.getClientRects().length ) { - - isBorderBox = jQuery.css( elem, "boxSizing", false, styles ) === "border-box"; - - // Where available, offsetWidth/offsetHeight approximate border box dimensions. - // Where not available (e.g., SVG), assume unreliable box-sizing and interpret the - // retrieved value as a content box dimension. - valueIsBorderBox = offsetProp in elem; - if ( valueIsBorderBox ) { - val = elem[ offsetProp ]; - } - } - - // Normalize "" and auto - val = parseFloat( val ) || 0; - - // Adjust for the element's box model - return ( val + - boxModelAdjustment( - elem, - dimension, - extra || ( isBorderBox ? "border" : "content" ), - valueIsBorderBox, - styles, - - // Provide the current computed size to request scroll gutter calculation (gh-3589) - val - ) - ) + "px"; -} - -jQuery.extend( { - - // Add in style property hooks for overriding the default - // behavior of getting and setting a style property - cssHooks: { - opacity: { - get: function( elem, computed ) { - if ( computed ) { - - // We should always get a number back from opacity - var ret = curCSS( elem, "opacity" ); - return ret === "" ? "1" : ret; - } - } - } - }, - - // Don't automatically add "px" to these possibly-unitless properties - cssNumber: { - "animationIterationCount": true, - "columnCount": true, - "fillOpacity": true, - "flexGrow": true, - "flexShrink": true, - "fontWeight": true, - "gridArea": true, - "gridColumn": true, - "gridColumnEnd": true, - "gridColumnStart": true, - "gridRow": true, - "gridRowEnd": true, - "gridRowStart": true, - "lineHeight": true, - "opacity": true, - "order": true, - "orphans": true, - "widows": true, - "zIndex": true, - "zoom": true - }, - - // Add in properties whose names you wish to fix before - // setting or getting the value - cssProps: {}, - - // Get and set the style property on a DOM Node - style: function( elem, name, value, extra ) { - - // Don't set styles on text and comment nodes - if ( !elem || elem.nodeType === 3 || elem.nodeType === 8 || !elem.style ) { - return; - } - - // Make sure that we're working with the right name - var ret, type, hooks, - origName = camelCase( name ), - isCustomProp = rcustomProp.test( name ), - style = elem.style; - - // Make sure that we're working with the right name. We don't - // want to query the value if it is a CSS custom property - // since they are user-defined. 
- if ( !isCustomProp ) { - name = finalPropName( origName ); - } - - // Gets hook for the prefixed version, then unprefixed version - hooks = jQuery.cssHooks[ name ] || jQuery.cssHooks[ origName ]; - - // Check if we're setting a value - if ( value !== undefined ) { - type = typeof value; - - // Convert "+=" or "-=" to relative numbers (#7345) - if ( type === "string" && ( ret = rcssNum.exec( value ) ) && ret[ 1 ] ) { - value = adjustCSS( elem, name, ret ); - - // Fixes bug #9237 - type = "number"; - } - - // Make sure that null and NaN values aren't set (#7116) - if ( value == null || value !== value ) { - return; - } - - // If a number was passed in, add the unit (except for certain CSS properties) - // The isCustomProp check can be removed in jQuery 4.0 when we only auto-append - // "px" to a few hardcoded values. - if ( type === "number" && !isCustomProp ) { - value += ret && ret[ 3 ] || ( jQuery.cssNumber[ origName ] ? "" : "px" ); - } - - // background-* props affect original clone's values - if ( !support.clearCloneStyle && value === "" && name.indexOf( "background" ) === 0 ) { - style[ name ] = "inherit"; - } - - // If a hook was provided, use that value, otherwise just set the specified value - if ( !hooks || !( "set" in hooks ) || - ( value = hooks.set( elem, value, extra ) ) !== undefined ) { - - if ( isCustomProp ) { - style.setProperty( name, value ); - } else { - style[ name ] = value; - } - } - - } else { - - // If a hook was provided get the non-computed value from there - if ( hooks && "get" in hooks && - ( ret = hooks.get( elem, false, extra ) ) !== undefined ) { - - return ret; - } - - // Otherwise just get the value from the style object - return style[ name ]; - } - }, - - css: function( elem, name, extra, styles ) { - var val, num, hooks, - origName = camelCase( name ), - isCustomProp = rcustomProp.test( name ); - - // Make sure that we're working with the right name. We don't - // want to modify the value if it is a CSS custom property - // since they are user-defined. - if ( !isCustomProp ) { - name = finalPropName( origName ); - } - - // Try prefixed name followed by the unprefixed name - hooks = jQuery.cssHooks[ name ] || jQuery.cssHooks[ origName ]; - - // If a hook was provided get the computed value from there - if ( hooks && "get" in hooks ) { - val = hooks.get( elem, true, extra ); - } - - // Otherwise, if a way to get the computed value exists, use that - if ( val === undefined ) { - val = curCSS( elem, name, styles ); - } - - // Convert "normal" to computed value - if ( val === "normal" && name in cssNormalTransform ) { - val = cssNormalTransform[ name ]; - } - - // Make numeric if forced or a qualifier was provided and val looks numeric - if ( extra === "" || extra ) { - num = parseFloat( val ); - return extra === true || isFinite( num ) ? num || 0 : val; - } - - return val; - } -} ); - -jQuery.each( [ "height", "width" ], function( _i, dimension ) { - jQuery.cssHooks[ dimension ] = { - get: function( elem, computed, extra ) { - if ( computed ) { - - // Certain elements can have dimension info if we invisibly show them - // but it must have a current display style that would benefit - return rdisplayswap.test( jQuery.css( elem, "display" ) ) && - - // Support: Safari 8+ - // Table columns in Safari have non-zero offsetWidth & zero - // getBoundingClientRect().width unless display is changed. - // Support: IE <=11 only - // Running getBoundingClientRect on a disconnected node - // in IE throws an error. 
- ( !elem.getClientRects().length || !elem.getBoundingClientRect().width ) ? - swap( elem, cssShow, function() { - return getWidthOrHeight( elem, dimension, extra ); - } ) : - getWidthOrHeight( elem, dimension, extra ); - } - }, - - set: function( elem, value, extra ) { - var matches, - styles = getStyles( elem ), - - // Only read styles.position if the test has a chance to fail - // to avoid forcing a reflow. - scrollboxSizeBuggy = !support.scrollboxSize() && - styles.position === "absolute", - - // To avoid forcing a reflow, only fetch boxSizing if we need it (gh-3991) - boxSizingNeeded = scrollboxSizeBuggy || extra, - isBorderBox = boxSizingNeeded && - jQuery.css( elem, "boxSizing", false, styles ) === "border-box", - subtract = extra ? - boxModelAdjustment( - elem, - dimension, - extra, - isBorderBox, - styles - ) : - 0; - - // Account for unreliable border-box dimensions by comparing offset* to computed and - // faking a content-box to get border and padding (gh-3699) - if ( isBorderBox && scrollboxSizeBuggy ) { - subtract -= Math.ceil( - elem[ "offset" + dimension[ 0 ].toUpperCase() + dimension.slice( 1 ) ] - - parseFloat( styles[ dimension ] ) - - boxModelAdjustment( elem, dimension, "border", false, styles ) - - 0.5 - ); - } - - // Convert to pixels if value adjustment is needed - if ( subtract && ( matches = rcssNum.exec( value ) ) && - ( matches[ 3 ] || "px" ) !== "px" ) { - - elem.style[ dimension ] = value; - value = jQuery.css( elem, dimension ); - } - - return setPositiveNumber( elem, value, subtract ); - } - }; -} ); - -jQuery.cssHooks.marginLeft = addGetHookIf( support.reliableMarginLeft, - function( elem, computed ) { - if ( computed ) { - return ( parseFloat( curCSS( elem, "marginLeft" ) ) || - elem.getBoundingClientRect().left - - swap( elem, { marginLeft: 0 }, function() { - return elem.getBoundingClientRect().left; - } ) - ) + "px"; - } - } -); - -// These hooks are used by animate to expand properties -jQuery.each( { - margin: "", - padding: "", - border: "Width" -}, function( prefix, suffix ) { - jQuery.cssHooks[ prefix + suffix ] = { - expand: function( value ) { - var i = 0, - expanded = {}, - - // Assumes a single number if not a string - parts = typeof value === "string" ? value.split( " " ) : [ value ]; - - for ( ; i < 4; i++ ) { - expanded[ prefix + cssExpand[ i ] + suffix ] = - parts[ i ] || parts[ i - 2 ] || parts[ 0 ]; - } - - return expanded; - } - }; - - if ( prefix !== "margin" ) { - jQuery.cssHooks[ prefix + suffix ].set = setPositiveNumber; - } -} ); - -jQuery.fn.extend( { - css: function( name, value ) { - return access( this, function( elem, name, value ) { - var styles, len, - map = {}, - i = 0; - - if ( Array.isArray( name ) ) { - styles = getStyles( elem ); - len = name.length; - - for ( ; i < len; i++ ) { - map[ name[ i ] ] = jQuery.css( elem, name[ i ], false, styles ); - } - - return map; - } - - return value !== undefined ? - jQuery.style( elem, name, value ) : - jQuery.css( elem, name ); - }, name, value, arguments.length > 1 ); - } -} ); - - -function Tween( elem, options, prop, end, easing ) { - return new Tween.prototype.init( elem, options, prop, end, easing ); -} -jQuery.Tween = Tween; - -Tween.prototype = { - constructor: Tween, - init: function( elem, options, prop, end, easing, unit ) { - this.elem = elem; - this.prop = prop; - this.easing = easing || jQuery.easing._default; - this.options = options; - this.start = this.now = this.cur(); - this.end = end; - this.unit = unit || ( jQuery.cssNumber[ prop ] ? 
"" : "px" ); - }, - cur: function() { - var hooks = Tween.propHooks[ this.prop ]; - - return hooks && hooks.get ? - hooks.get( this ) : - Tween.propHooks._default.get( this ); - }, - run: function( percent ) { - var eased, - hooks = Tween.propHooks[ this.prop ]; - - if ( this.options.duration ) { - this.pos = eased = jQuery.easing[ this.easing ]( - percent, this.options.duration * percent, 0, 1, this.options.duration - ); - } else { - this.pos = eased = percent; - } - this.now = ( this.end - this.start ) * eased + this.start; - - if ( this.options.step ) { - this.options.step.call( this.elem, this.now, this ); - } - - if ( hooks && hooks.set ) { - hooks.set( this ); - } else { - Tween.propHooks._default.set( this ); - } - return this; - } -}; - -Tween.prototype.init.prototype = Tween.prototype; - -Tween.propHooks = { - _default: { - get: function( tween ) { - var result; - - // Use a property on the element directly when it is not a DOM element, - // or when there is no matching style property that exists. - if ( tween.elem.nodeType !== 1 || - tween.elem[ tween.prop ] != null && tween.elem.style[ tween.prop ] == null ) { - return tween.elem[ tween.prop ]; - } - - // Passing an empty string as a 3rd parameter to .css will automatically - // attempt a parseFloat and fallback to a string if the parse fails. - // Simple values such as "10px" are parsed to Float; - // complex values such as "rotate(1rad)" are returned as-is. - result = jQuery.css( tween.elem, tween.prop, "" ); - - // Empty strings, null, undefined and "auto" are converted to 0. - return !result || result === "auto" ? 0 : result; - }, - set: function( tween ) { - - // Use step hook for back compat. - // Use cssHook if its there. - // Use .style if available and use plain properties where available. - if ( jQuery.fx.step[ tween.prop ] ) { - jQuery.fx.step[ tween.prop ]( tween ); - } else if ( tween.elem.nodeType === 1 && ( - jQuery.cssHooks[ tween.prop ] || - tween.elem.style[ finalPropName( tween.prop ) ] != null ) ) { - jQuery.style( tween.elem, tween.prop, tween.now + tween.unit ); - } else { - tween.elem[ tween.prop ] = tween.now; - } - } - } -}; - -// Support: IE <=9 only -// Panic based approach to setting things on disconnected nodes -Tween.propHooks.scrollTop = Tween.propHooks.scrollLeft = { - set: function( tween ) { - if ( tween.elem.nodeType && tween.elem.parentNode ) { - tween.elem[ tween.prop ] = tween.now; - } - } -}; - -jQuery.easing = { - linear: function( p ) { - return p; - }, - swing: function( p ) { - return 0.5 - Math.cos( p * Math.PI ) / 2; - }, - _default: "swing" -}; - -jQuery.fx = Tween.prototype.init; - -// Back compat <1.8 extension point -jQuery.fx.step = {}; - - - - -var - fxNow, inProgress, - rfxtypes = /^(?:toggle|show|hide)$/, - rrun = /queueHooks$/; - -function schedule() { - if ( inProgress ) { - if ( document.hidden === false && window.requestAnimationFrame ) { - window.requestAnimationFrame( schedule ); - } else { - window.setTimeout( schedule, jQuery.fx.interval ); - } - - jQuery.fx.tick(); - } -} - -// Animations created synchronously will run synchronously -function createFxNow() { - window.setTimeout( function() { - fxNow = undefined; - } ); - return ( fxNow = Date.now() ); -} - -// Generate parameters to create a standard animation -function genFx( type, includeWidth ) { - var which, - i = 0, - attrs = { height: type }; - - // If we include width, step value is 1 to do all cssExpand values, - // otherwise step value is 2 to skip over Left and Right - includeWidth = includeWidth ? 
1 : 0; - for ( ; i < 4; i += 2 - includeWidth ) { - which = cssExpand[ i ]; - attrs[ "margin" + which ] = attrs[ "padding" + which ] = type; - } - - if ( includeWidth ) { - attrs.opacity = attrs.width = type; - } - - return attrs; -} - -function createTween( value, prop, animation ) { - var tween, - collection = ( Animation.tweeners[ prop ] || [] ).concat( Animation.tweeners[ "*" ] ), - index = 0, - length = collection.length; - for ( ; index < length; index++ ) { - if ( ( tween = collection[ index ].call( animation, prop, value ) ) ) { - - // We're done with this property - return tween; - } - } -} - -function defaultPrefilter( elem, props, opts ) { - var prop, value, toggle, hooks, oldfire, propTween, restoreDisplay, display, - isBox = "width" in props || "height" in props, - anim = this, - orig = {}, - style = elem.style, - hidden = elem.nodeType && isHiddenWithinTree( elem ), - dataShow = dataPriv.get( elem, "fxshow" ); - - // Queue-skipping animations hijack the fx hooks - if ( !opts.queue ) { - hooks = jQuery._queueHooks( elem, "fx" ); - if ( hooks.unqueued == null ) { - hooks.unqueued = 0; - oldfire = hooks.empty.fire; - hooks.empty.fire = function() { - if ( !hooks.unqueued ) { - oldfire(); - } - }; - } - hooks.unqueued++; - - anim.always( function() { - - // Ensure the complete handler is called before this completes - anim.always( function() { - hooks.unqueued--; - if ( !jQuery.queue( elem, "fx" ).length ) { - hooks.empty.fire(); - } - } ); - } ); - } - - // Detect show/hide animations - for ( prop in props ) { - value = props[ prop ]; - if ( rfxtypes.test( value ) ) { - delete props[ prop ]; - toggle = toggle || value === "toggle"; - if ( value === ( hidden ? "hide" : "show" ) ) { - - // Pretend to be hidden if this is a "show" and - // there is still data from a stopped show/hide - if ( value === "show" && dataShow && dataShow[ prop ] !== undefined ) { - hidden = true; - - // Ignore all other no-op show/hide data - } else { - continue; - } - } - orig[ prop ] = dataShow && dataShow[ prop ] || jQuery.style( elem, prop ); - } - } - - // Bail out if this is a no-op like .hide().hide() - propTween = !jQuery.isEmptyObject( props ); - if ( !propTween && jQuery.isEmptyObject( orig ) ) { - return; - } - - // Restrict "overflow" and "display" styles during box animations - if ( isBox && elem.nodeType === 1 ) { - - // Support: IE <=9 - 11, Edge 12 - 15 - // Record all 3 overflow attributes because IE does not infer the shorthand - // from identically-valued overflowX and overflowY and Edge just mirrors - // the overflowX value there. 
- opts.overflow = [ style.overflow, style.overflowX, style.overflowY ]; - - // Identify a display type, preferring old show/hide data over the CSS cascade - restoreDisplay = dataShow && dataShow.display; - if ( restoreDisplay == null ) { - restoreDisplay = dataPriv.get( elem, "display" ); - } - display = jQuery.css( elem, "display" ); - if ( display === "none" ) { - if ( restoreDisplay ) { - display = restoreDisplay; - } else { - - // Get nonempty value(s) by temporarily forcing visibility - showHide( [ elem ], true ); - restoreDisplay = elem.style.display || restoreDisplay; - display = jQuery.css( elem, "display" ); - showHide( [ elem ] ); - } - } - - // Animate inline elements as inline-block - if ( display === "inline" || display === "inline-block" && restoreDisplay != null ) { - if ( jQuery.css( elem, "float" ) === "none" ) { - - // Restore the original display value at the end of pure show/hide animations - if ( !propTween ) { - anim.done( function() { - style.display = restoreDisplay; - } ); - if ( restoreDisplay == null ) { - display = style.display; - restoreDisplay = display === "none" ? "" : display; - } - } - style.display = "inline-block"; - } - } - } - - if ( opts.overflow ) { - style.overflow = "hidden"; - anim.always( function() { - style.overflow = opts.overflow[ 0 ]; - style.overflowX = opts.overflow[ 1 ]; - style.overflowY = opts.overflow[ 2 ]; - } ); - } - - // Implement show/hide animations - propTween = false; - for ( prop in orig ) { - - // General show/hide setup for this element animation - if ( !propTween ) { - if ( dataShow ) { - if ( "hidden" in dataShow ) { - hidden = dataShow.hidden; - } - } else { - dataShow = dataPriv.access( elem, "fxshow", { display: restoreDisplay } ); - } - - // Store hidden/visible for toggle so `.stop().toggle()` "reverses" - if ( toggle ) { - dataShow.hidden = !hidden; - } - - // Show elements before animating them - if ( hidden ) { - showHide( [ elem ], true ); - } - - /* eslint-disable no-loop-func */ - - anim.done( function() { - - /* eslint-enable no-loop-func */ - - // The final step of a "hide" animation is actually hiding the element - if ( !hidden ) { - showHide( [ elem ] ); - } - dataPriv.remove( elem, "fxshow" ); - for ( prop in orig ) { - jQuery.style( elem, prop, orig[ prop ] ); - } - } ); - } - - // Per-property setup - propTween = createTween( hidden ? dataShow[ prop ] : 0, prop, anim ); - if ( !( prop in dataShow ) ) { - dataShow[ prop ] = propTween.start; - if ( hidden ) { - propTween.end = propTween.start; - propTween.start = 0; - } - } - } -} - -function propFilter( props, specialEasing ) { - var index, name, easing, value, hooks; - - // camelCase, specialEasing and expand cssHook pass - for ( index in props ) { - name = camelCase( index ); - easing = specialEasing[ name ]; - value = props[ index ]; - if ( Array.isArray( value ) ) { - easing = value[ 1 ]; - value = props[ index ] = value[ 0 ]; - } - - if ( index !== name ) { - props[ name ] = value; - delete props[ index ]; - } - - hooks = jQuery.cssHooks[ name ]; - if ( hooks && "expand" in hooks ) { - value = hooks.expand( value ); - delete props[ name ]; - - // Not quite $.extend, this won't overwrite existing keys. 
- // Reusing 'index' because we have the correct "name" - for ( index in value ) { - if ( !( index in props ) ) { - props[ index ] = value[ index ]; - specialEasing[ index ] = easing; - } - } - } else { - specialEasing[ name ] = easing; - } - } -} - -function Animation( elem, properties, options ) { - var result, - stopped, - index = 0, - length = Animation.prefilters.length, - deferred = jQuery.Deferred().always( function() { - - // Don't match elem in the :animated selector - delete tick.elem; - } ), - tick = function() { - if ( stopped ) { - return false; - } - var currentTime = fxNow || createFxNow(), - remaining = Math.max( 0, animation.startTime + animation.duration - currentTime ), - - // Support: Android 2.3 only - // Archaic crash bug won't allow us to use `1 - ( 0.5 || 0 )` (#12497) - temp = remaining / animation.duration || 0, - percent = 1 - temp, - index = 0, - length = animation.tweens.length; - - for ( ; index < length; index++ ) { - animation.tweens[ index ].run( percent ); - } - - deferred.notifyWith( elem, [ animation, percent, remaining ] ); - - // If there's more to do, yield - if ( percent < 1 && length ) { - return remaining; - } - - // If this was an empty animation, synthesize a final progress notification - if ( !length ) { - deferred.notifyWith( elem, [ animation, 1, 0 ] ); - } - - // Resolve the animation and report its conclusion - deferred.resolveWith( elem, [ animation ] ); - return false; - }, - animation = deferred.promise( { - elem: elem, - props: jQuery.extend( {}, properties ), - opts: jQuery.extend( true, { - specialEasing: {}, - easing: jQuery.easing._default - }, options ), - originalProperties: properties, - originalOptions: options, - startTime: fxNow || createFxNow(), - duration: options.duration, - tweens: [], - createTween: function( prop, end ) { - var tween = jQuery.Tween( elem, animation.opts, prop, end, - animation.opts.specialEasing[ prop ] || animation.opts.easing ); - animation.tweens.push( tween ); - return tween; - }, - stop: function( gotoEnd ) { - var index = 0, - - // If we are going to the end, we want to run all the tweens - // otherwise we skip this part - length = gotoEnd ? 
animation.tweens.length : 0; - if ( stopped ) { - return this; - } - stopped = true; - for ( ; index < length; index++ ) { - animation.tweens[ index ].run( 1 ); - } - - // Resolve when we played the last frame; otherwise, reject - if ( gotoEnd ) { - deferred.notifyWith( elem, [ animation, 1, 0 ] ); - deferred.resolveWith( elem, [ animation, gotoEnd ] ); - } else { - deferred.rejectWith( elem, [ animation, gotoEnd ] ); - } - return this; - } - } ), - props = animation.props; - - propFilter( props, animation.opts.specialEasing ); - - for ( ; index < length; index++ ) { - result = Animation.prefilters[ index ].call( animation, elem, props, animation.opts ); - if ( result ) { - if ( isFunction( result.stop ) ) { - jQuery._queueHooks( animation.elem, animation.opts.queue ).stop = - result.stop.bind( result ); - } - return result; - } - } - - jQuery.map( props, createTween, animation ); - - if ( isFunction( animation.opts.start ) ) { - animation.opts.start.call( elem, animation ); - } - - // Attach callbacks from options - animation - .progress( animation.opts.progress ) - .done( animation.opts.done, animation.opts.complete ) - .fail( animation.opts.fail ) - .always( animation.opts.always ); - - jQuery.fx.timer( - jQuery.extend( tick, { - elem: elem, - anim: animation, - queue: animation.opts.queue - } ) - ); - - return animation; -} - -jQuery.Animation = jQuery.extend( Animation, { - - tweeners: { - "*": [ function( prop, value ) { - var tween = this.createTween( prop, value ); - adjustCSS( tween.elem, prop, rcssNum.exec( value ), tween ); - return tween; - } ] - }, - - tweener: function( props, callback ) { - if ( isFunction( props ) ) { - callback = props; - props = [ "*" ]; - } else { - props = props.match( rnothtmlwhite ); - } - - var prop, - index = 0, - length = props.length; - - for ( ; index < length; index++ ) { - prop = props[ index ]; - Animation.tweeners[ prop ] = Animation.tweeners[ prop ] || []; - Animation.tweeners[ prop ].unshift( callback ); - } - }, - - prefilters: [ defaultPrefilter ], - - prefilter: function( callback, prepend ) { - if ( prepend ) { - Animation.prefilters.unshift( callback ); - } else { - Animation.prefilters.push( callback ); - } - } -} ); - -jQuery.speed = function( speed, easing, fn ) { - var opt = speed && typeof speed === "object" ? 
jQuery.extend( {}, speed ) : { - complete: fn || !fn && easing || - isFunction( speed ) && speed, - duration: speed, - easing: fn && easing || easing && !isFunction( easing ) && easing - }; - - // Go to the end state if fx are off - if ( jQuery.fx.off ) { - opt.duration = 0; - - } else { - if ( typeof opt.duration !== "number" ) { - if ( opt.duration in jQuery.fx.speeds ) { - opt.duration = jQuery.fx.speeds[ opt.duration ]; - - } else { - opt.duration = jQuery.fx.speeds._default; - } - } - } - - // Normalize opt.queue - true/undefined/null -> "fx" - if ( opt.queue == null || opt.queue === true ) { - opt.queue = "fx"; - } - - // Queueing - opt.old = opt.complete; - - opt.complete = function() { - if ( isFunction( opt.old ) ) { - opt.old.call( this ); - } - - if ( opt.queue ) { - jQuery.dequeue( this, opt.queue ); - } - }; - - return opt; -}; - -jQuery.fn.extend( { - fadeTo: function( speed, to, easing, callback ) { - - // Show any hidden elements after setting opacity to 0 - return this.filter( isHiddenWithinTree ).css( "opacity", 0 ).show() - - // Animate to the value specified - .end().animate( { opacity: to }, speed, easing, callback ); - }, - animate: function( prop, speed, easing, callback ) { - var empty = jQuery.isEmptyObject( prop ), - optall = jQuery.speed( speed, easing, callback ), - doAnimation = function() { - - // Operate on a copy of prop so per-property easing won't be lost - var anim = Animation( this, jQuery.extend( {}, prop ), optall ); - - // Empty animations, or finishing resolves immediately - if ( empty || dataPriv.get( this, "finish" ) ) { - anim.stop( true ); - } - }; - doAnimation.finish = doAnimation; - - return empty || optall.queue === false ? - this.each( doAnimation ) : - this.queue( optall.queue, doAnimation ); - }, - stop: function( type, clearQueue, gotoEnd ) { - var stopQueue = function( hooks ) { - var stop = hooks.stop; - delete hooks.stop; - stop( gotoEnd ); - }; - - if ( typeof type !== "string" ) { - gotoEnd = clearQueue; - clearQueue = type; - type = undefined; - } - if ( clearQueue ) { - this.queue( type || "fx", [] ); - } - - return this.each( function() { - var dequeue = true, - index = type != null && type + "queueHooks", - timers = jQuery.timers, - data = dataPriv.get( this ); - - if ( index ) { - if ( data[ index ] && data[ index ].stop ) { - stopQueue( data[ index ] ); - } - } else { - for ( index in data ) { - if ( data[ index ] && data[ index ].stop && rrun.test( index ) ) { - stopQueue( data[ index ] ); - } - } - } - - for ( index = timers.length; index--; ) { - if ( timers[ index ].elem === this && - ( type == null || timers[ index ].queue === type ) ) { - - timers[ index ].anim.stop( gotoEnd ); - dequeue = false; - timers.splice( index, 1 ); - } - } - - // Start the next in the queue if the last step wasn't forced. - // Timers currently will call their complete callbacks, which - // will dequeue but only if they were gotoEnd. - if ( dequeue || !gotoEnd ) { - jQuery.dequeue( this, type ); - } - } ); - }, - finish: function( type ) { - if ( type !== false ) { - type = type || "fx"; - } - return this.each( function() { - var index, - data = dataPriv.get( this ), - queue = data[ type + "queue" ], - hooks = data[ type + "queueHooks" ], - timers = jQuery.timers, - length = queue ? 
queue.length : 0; - - // Enable finishing flag on private data - data.finish = true; - - // Empty the queue first - jQuery.queue( this, type, [] ); - - if ( hooks && hooks.stop ) { - hooks.stop.call( this, true ); - } - - // Look for any active animations, and finish them - for ( index = timers.length; index--; ) { - if ( timers[ index ].elem === this && timers[ index ].queue === type ) { - timers[ index ].anim.stop( true ); - timers.splice( index, 1 ); - } - } - - // Look for any animations in the old queue and finish them - for ( index = 0; index < length; index++ ) { - if ( queue[ index ] && queue[ index ].finish ) { - queue[ index ].finish.call( this ); - } - } - - // Turn off finishing flag - delete data.finish; - } ); - } -} ); - -jQuery.each( [ "toggle", "show", "hide" ], function( _i, name ) { - var cssFn = jQuery.fn[ name ]; - jQuery.fn[ name ] = function( speed, easing, callback ) { - return speed == null || typeof speed === "boolean" ? - cssFn.apply( this, arguments ) : - this.animate( genFx( name, true ), speed, easing, callback ); - }; -} ); - -// Generate shortcuts for custom animations -jQuery.each( { - slideDown: genFx( "show" ), - slideUp: genFx( "hide" ), - slideToggle: genFx( "toggle" ), - fadeIn: { opacity: "show" }, - fadeOut: { opacity: "hide" }, - fadeToggle: { opacity: "toggle" } -}, function( name, props ) { - jQuery.fn[ name ] = function( speed, easing, callback ) { - return this.animate( props, speed, easing, callback ); - }; -} ); - -jQuery.timers = []; -jQuery.fx.tick = function() { - var timer, - i = 0, - timers = jQuery.timers; - - fxNow = Date.now(); - - for ( ; i < timers.length; i++ ) { - timer = timers[ i ]; - - // Run the timer and safely remove it when done (allowing for external removal) - if ( !timer() && timers[ i ] === timer ) { - timers.splice( i--, 1 ); - } - } - - if ( !timers.length ) { - jQuery.fx.stop(); - } - fxNow = undefined; -}; - -jQuery.fx.timer = function( timer ) { - jQuery.timers.push( timer ); - jQuery.fx.start(); -}; - -jQuery.fx.interval = 13; -jQuery.fx.start = function() { - if ( inProgress ) { - return; - } - - inProgress = true; - schedule(); -}; - -jQuery.fx.stop = function() { - inProgress = null; -}; - -jQuery.fx.speeds = { - slow: 600, - fast: 200, - - // Default speed - _default: 400 -}; - - -// Based off of the plugin by Clint Helfers, with permission. -// https://web.archive.org/web/20100324014747/http://blindsignals.com/index.php/2009/07/jquery-delay/ -jQuery.fn.delay = function( time, type ) { - time = jQuery.fx ? 
jQuery.fx.speeds[ time ] || time : time; - type = type || "fx"; - - return this.queue( type, function( next, hooks ) { - var timeout = window.setTimeout( next, time ); - hooks.stop = function() { - window.clearTimeout( timeout ); - }; - } ); -}; - - -( function() { - var input = document.createElement( "input" ), - select = document.createElement( "select" ), - opt = select.appendChild( document.createElement( "option" ) ); - - input.type = "checkbox"; - - // Support: Android <=4.3 only - // Default value for a checkbox should be "on" - support.checkOn = input.value !== ""; - - // Support: IE <=11 only - // Must access selectedIndex to make default options select - support.optSelected = opt.selected; - - // Support: IE <=11 only - // An input loses its value after becoming a radio - input = document.createElement( "input" ); - input.value = "t"; - input.type = "radio"; - support.radioValue = input.value === "t"; -} )(); - - -var boolHook, - attrHandle = jQuery.expr.attrHandle; - -jQuery.fn.extend( { - attr: function( name, value ) { - return access( this, jQuery.attr, name, value, arguments.length > 1 ); - }, - - removeAttr: function( name ) { - return this.each( function() { - jQuery.removeAttr( this, name ); - } ); - } -} ); - -jQuery.extend( { - attr: function( elem, name, value ) { - var ret, hooks, - nType = elem.nodeType; - - // Don't get/set attributes on text, comment and attribute nodes - if ( nType === 3 || nType === 8 || nType === 2 ) { - return; - } - - // Fallback to prop when attributes are not supported - if ( typeof elem.getAttribute === "undefined" ) { - return jQuery.prop( elem, name, value ); - } - - // Attribute hooks are determined by the lowercase version - // Grab necessary hook if one is defined - if ( nType !== 1 || !jQuery.isXMLDoc( elem ) ) { - hooks = jQuery.attrHooks[ name.toLowerCase() ] || - ( jQuery.expr.match.bool.test( name ) ? boolHook : undefined ); - } - - if ( value !== undefined ) { - if ( value === null ) { - jQuery.removeAttr( elem, name ); - return; - } - - if ( hooks && "set" in hooks && - ( ret = hooks.set( elem, value, name ) ) !== undefined ) { - return ret; - } - - elem.setAttribute( name, value + "" ); - return value; - } - - if ( hooks && "get" in hooks && ( ret = hooks.get( elem, name ) ) !== null ) { - return ret; - } - - ret = jQuery.find.attr( elem, name ); - - // Non-existent attributes return null, we normalize to undefined - return ret == null ? 
undefined : ret; - }, - - attrHooks: { - type: { - set: function( elem, value ) { - if ( !support.radioValue && value === "radio" && - nodeName( elem, "input" ) ) { - var val = elem.value; - elem.setAttribute( "type", value ); - if ( val ) { - elem.value = val; - } - return value; - } - } - } - }, - - removeAttr: function( elem, value ) { - var name, - i = 0, - - // Attribute names can contain non-HTML whitespace characters - // https://html.spec.whatwg.org/multipage/syntax.html#attributes-2 - attrNames = value && value.match( rnothtmlwhite ); - - if ( attrNames && elem.nodeType === 1 ) { - while ( ( name = attrNames[ i++ ] ) ) { - elem.removeAttribute( name ); - } - } - } -} ); - -// Hooks for boolean attributes -boolHook = { - set: function( elem, value, name ) { - if ( value === false ) { - - // Remove boolean attributes when set to false - jQuery.removeAttr( elem, name ); - } else { - elem.setAttribute( name, name ); - } - return name; - } -}; - -jQuery.each( jQuery.expr.match.bool.source.match( /\w+/g ), function( _i, name ) { - var getter = attrHandle[ name ] || jQuery.find.attr; - - attrHandle[ name ] = function( elem, name, isXML ) { - var ret, handle, - lowercaseName = name.toLowerCase(); - - if ( !isXML ) { - - // Avoid an infinite loop by temporarily removing this function from the getter - handle = attrHandle[ lowercaseName ]; - attrHandle[ lowercaseName ] = ret; - ret = getter( elem, name, isXML ) != null ? - lowercaseName : - null; - attrHandle[ lowercaseName ] = handle; - } - return ret; - }; -} ); - - - - -var rfocusable = /^(?:input|select|textarea|button)$/i, - rclickable = /^(?:a|area)$/i; - -jQuery.fn.extend( { - prop: function( name, value ) { - return access( this, jQuery.prop, name, value, arguments.length > 1 ); - }, - - removeProp: function( name ) { - return this.each( function() { - delete this[ jQuery.propFix[ name ] || name ]; - } ); - } -} ); - -jQuery.extend( { - prop: function( elem, name, value ) { - var ret, hooks, - nType = elem.nodeType; - - // Don't get/set properties on text, comment and attribute nodes - if ( nType === 3 || nType === 8 || nType === 2 ) { - return; - } - - if ( nType !== 1 || !jQuery.isXMLDoc( elem ) ) { - - // Fix name and attach hooks - name = jQuery.propFix[ name ] || name; - hooks = jQuery.propHooks[ name ]; - } - - if ( value !== undefined ) { - if ( hooks && "set" in hooks && - ( ret = hooks.set( elem, value, name ) ) !== undefined ) { - return ret; - } - - return ( elem[ name ] = value ); - } - - if ( hooks && "get" in hooks && ( ret = hooks.get( elem, name ) ) !== null ) { - return ret; - } - - return elem[ name ]; - }, - - propHooks: { - tabIndex: { - get: function( elem ) { - - // Support: IE <=9 - 11 only - // elem.tabIndex doesn't always return the - // correct value when it hasn't been explicitly set - // https://web.archive.org/web/20141116233347/http://fluidproject.org/blog/2008/01/09/getting-setting-and-removing-tabindex-values-with-javascript/ - // Use proper attribute retrieval(#12072) - var tabindex = jQuery.find.attr( elem, "tabindex" ); - - if ( tabindex ) { - return parseInt( tabindex, 10 ); - } - - if ( - rfocusable.test( elem.nodeName ) || - rclickable.test( elem.nodeName ) && - elem.href - ) { - return 0; - } - - return -1; - } - } - }, - - propFix: { - "for": "htmlFor", - "class": "className" - } -} ); - -// Support: IE <=11 only -// Accessing the selectedIndex property -// forces the browser to respect setting selected -// on the option -// The getter ensures a default option is selected -// when in an 
optgroup -// eslint rule "no-unused-expressions" is disabled for this code -// since it considers such accessions noop -if ( !support.optSelected ) { - jQuery.propHooks.selected = { - get: function( elem ) { - - /* eslint no-unused-expressions: "off" */ - - var parent = elem.parentNode; - if ( parent && parent.parentNode ) { - parent.parentNode.selectedIndex; - } - return null; - }, - set: function( elem ) { - - /* eslint no-unused-expressions: "off" */ - - var parent = elem.parentNode; - if ( parent ) { - parent.selectedIndex; - - if ( parent.parentNode ) { - parent.parentNode.selectedIndex; - } - } - } - }; -} - -jQuery.each( [ - "tabIndex", - "readOnly", - "maxLength", - "cellSpacing", - "cellPadding", - "rowSpan", - "colSpan", - "useMap", - "frameBorder", - "contentEditable" -], function() { - jQuery.propFix[ this.toLowerCase() ] = this; -} ); - - - - - // Strip and collapse whitespace according to HTML spec - // https://infra.spec.whatwg.org/#strip-and-collapse-ascii-whitespace - function stripAndCollapse( value ) { - var tokens = value.match( rnothtmlwhite ) || []; - return tokens.join( " " ); - } - - -function getClass( elem ) { - return elem.getAttribute && elem.getAttribute( "class" ) || ""; -} - -function classesToArray( value ) { - if ( Array.isArray( value ) ) { - return value; - } - if ( typeof value === "string" ) { - return value.match( rnothtmlwhite ) || []; - } - return []; -} - -jQuery.fn.extend( { - addClass: function( value ) { - var classes, elem, cur, curValue, clazz, j, finalValue, - i = 0; - - if ( isFunction( value ) ) { - return this.each( function( j ) { - jQuery( this ).addClass( value.call( this, j, getClass( this ) ) ); - } ); - } - - classes = classesToArray( value ); - - if ( classes.length ) { - while ( ( elem = this[ i++ ] ) ) { - curValue = getClass( elem ); - cur = elem.nodeType === 1 && ( " " + stripAndCollapse( curValue ) + " " ); - - if ( cur ) { - j = 0; - while ( ( clazz = classes[ j++ ] ) ) { - if ( cur.indexOf( " " + clazz + " " ) < 0 ) { - cur += clazz + " "; - } - } - - // Only assign if different to avoid unneeded rendering. - finalValue = stripAndCollapse( cur ); - if ( curValue !== finalValue ) { - elem.setAttribute( "class", finalValue ); - } - } - } - } - - return this; - }, - - removeClass: function( value ) { - var classes, elem, cur, curValue, clazz, j, finalValue, - i = 0; - - if ( isFunction( value ) ) { - return this.each( function( j ) { - jQuery( this ).removeClass( value.call( this, j, getClass( this ) ) ); - } ); - } - - if ( !arguments.length ) { - return this.attr( "class", "" ); - } - - classes = classesToArray( value ); - - if ( classes.length ) { - while ( ( elem = this[ i++ ] ) ) { - curValue = getClass( elem ); - - // This expression is here for better compressibility (see addClass) - cur = elem.nodeType === 1 && ( " " + stripAndCollapse( curValue ) + " " ); - - if ( cur ) { - j = 0; - while ( ( clazz = classes[ j++ ] ) ) { - - // Remove *all* instances - while ( cur.indexOf( " " + clazz + " " ) > -1 ) { - cur = cur.replace( " " + clazz + " ", " " ); - } - } - - // Only assign if different to avoid unneeded rendering. - finalValue = stripAndCollapse( cur ); - if ( curValue !== finalValue ) { - elem.setAttribute( "class", finalValue ); - } - } - } - } - - return this; - }, - - toggleClass: function( value, stateVal ) { - var type = typeof value, - isValidValue = type === "string" || Array.isArray( value ); - - if ( typeof stateVal === "boolean" && isValidValue ) { - return stateVal ? 
this.addClass( value ) : this.removeClass( value ); - } - - if ( isFunction( value ) ) { - return this.each( function( i ) { - jQuery( this ).toggleClass( - value.call( this, i, getClass( this ), stateVal ), - stateVal - ); - } ); - } - - return this.each( function() { - var className, i, self, classNames; - - if ( isValidValue ) { - - // Toggle individual class names - i = 0; - self = jQuery( this ); - classNames = classesToArray( value ); - - while ( ( className = classNames[ i++ ] ) ) { - - // Check each className given, space separated list - if ( self.hasClass( className ) ) { - self.removeClass( className ); - } else { - self.addClass( className ); - } - } - - // Toggle whole class name - } else if ( value === undefined || type === "boolean" ) { - className = getClass( this ); - if ( className ) { - - // Store className if set - dataPriv.set( this, "__className__", className ); - } - - // If the element has a class name or if we're passed `false`, - // then remove the whole classname (if there was one, the above saved it). - // Otherwise bring back whatever was previously saved (if anything), - // falling back to the empty string if nothing was stored. - if ( this.setAttribute ) { - this.setAttribute( "class", - className || value === false ? - "" : - dataPriv.get( this, "__className__" ) || "" - ); - } - } - } ); - }, - - hasClass: function( selector ) { - var className, elem, - i = 0; - - className = " " + selector + " "; - while ( ( elem = this[ i++ ] ) ) { - if ( elem.nodeType === 1 && - ( " " + stripAndCollapse( getClass( elem ) ) + " " ).indexOf( className ) > -1 ) { - return true; - } - } - - return false; - } -} ); - - - - -var rreturn = /\r/g; - -jQuery.fn.extend( { - val: function( value ) { - var hooks, ret, valueIsFunction, - elem = this[ 0 ]; - - if ( !arguments.length ) { - if ( elem ) { - hooks = jQuery.valHooks[ elem.type ] || - jQuery.valHooks[ elem.nodeName.toLowerCase() ]; - - if ( hooks && - "get" in hooks && - ( ret = hooks.get( elem, "value" ) ) !== undefined - ) { - return ret; - } - - ret = elem.value; - - // Handle most common string cases - if ( typeof ret === "string" ) { - return ret.replace( rreturn, "" ); - } - - // Handle cases where value is null/undef or number - return ret == null ? "" : ret; - } - - return; - } - - valueIsFunction = isFunction( value ); - - return this.each( function( i ) { - var val; - - if ( this.nodeType !== 1 ) { - return; - } - - if ( valueIsFunction ) { - val = value.call( this, i, jQuery( this ).val() ); - } else { - val = value; - } - - // Treat null/undefined as ""; convert numbers to string - if ( val == null ) { - val = ""; - - } else if ( typeof val === "number" ) { - val += ""; - - } else if ( Array.isArray( val ) ) { - val = jQuery.map( val, function( value ) { - return value == null ? "" : value + ""; - } ); - } - - hooks = jQuery.valHooks[ this.type ] || jQuery.valHooks[ this.nodeName.toLowerCase() ]; - - // If set returns undefined, fall back to normal setting - if ( !hooks || !( "set" in hooks ) || hooks.set( this, val, "value" ) === undefined ) { - this.value = val; - } - } ); - } -} ); - -jQuery.extend( { - valHooks: { - option: { - get: function( elem ) { - - var val = jQuery.find.attr( elem, "value" ); - return val != null ? 
- val : - - // Support: IE <=10 - 11 only - // option.text throws exceptions (#14686, #14858) - // Strip and collapse whitespace - // https://html.spec.whatwg.org/#strip-and-collapse-whitespace - stripAndCollapse( jQuery.text( elem ) ); - } - }, - select: { - get: function( elem ) { - var value, option, i, - options = elem.options, - index = elem.selectedIndex, - one = elem.type === "select-one", - values = one ? null : [], - max = one ? index + 1 : options.length; - - if ( index < 0 ) { - i = max; - - } else { - i = one ? index : 0; - } - - // Loop through all the selected options - for ( ; i < max; i++ ) { - option = options[ i ]; - - // Support: IE <=9 only - // IE8-9 doesn't update selected after form reset (#2551) - if ( ( option.selected || i === index ) && - - // Don't return options that are disabled or in a disabled optgroup - !option.disabled && - ( !option.parentNode.disabled || - !nodeName( option.parentNode, "optgroup" ) ) ) { - - // Get the specific value for the option - value = jQuery( option ).val(); - - // We don't need an array for one selects - if ( one ) { - return value; - } - - // Multi-Selects return an array - values.push( value ); - } - } - - return values; - }, - - set: function( elem, value ) { - var optionSet, option, - options = elem.options, - values = jQuery.makeArray( value ), - i = options.length; - - while ( i-- ) { - option = options[ i ]; - - /* eslint-disable no-cond-assign */ - - if ( option.selected = - jQuery.inArray( jQuery.valHooks.option.get( option ), values ) > -1 - ) { - optionSet = true; - } - - /* eslint-enable no-cond-assign */ - } - - // Force browsers to behave consistently when non-matching value is set - if ( !optionSet ) { - elem.selectedIndex = -1; - } - return values; - } - } - } -} ); - -// Radios and checkboxes getter/setter -jQuery.each( [ "radio", "checkbox" ], function() { - jQuery.valHooks[ this ] = { - set: function( elem, value ) { - if ( Array.isArray( value ) ) { - return ( elem.checked = jQuery.inArray( jQuery( elem ).val(), value ) > -1 ); - } - } - }; - if ( !support.checkOn ) { - jQuery.valHooks[ this ].get = function( elem ) { - return elem.getAttribute( "value" ) === null ? "on" : elem.value; - }; - } -} ); - - - - -// Return jQuery for attributes-only inclusion - - -support.focusin = "onfocusin" in window; - - -var rfocusMorph = /^(?:focusinfocus|focusoutblur)$/, - stopPropagationCallback = function( e ) { - e.stopPropagation(); - }; - -jQuery.extend( jQuery.event, { - - trigger: function( event, data, elem, onlyHandlers ) { - - var i, cur, tmp, bubbleType, ontype, handle, special, lastElement, - eventPath = [ elem || document ], - type = hasOwn.call( event, "type" ) ? event.type : event, - namespaces = hasOwn.call( event, "namespace" ) ? event.namespace.split( "." ) : []; - - cur = lastElement = tmp = elem = elem || document; - - // Don't do events on text and comment nodes - if ( elem.nodeType === 3 || elem.nodeType === 8 ) { - return; - } - - // focus/blur morphs to focusin/out; ensure we're not firing them right now - if ( rfocusMorph.test( type + jQuery.event.triggered ) ) { - return; - } - - if ( type.indexOf( "." ) > -1 ) { - - // Namespaced trigger; create a regexp to match event type in handle() - namespaces = type.split( "." ); - type = namespaces.shift(); - namespaces.sort(); - } - ontype = type.indexOf( ":" ) < 0 && "on" + type; - - // Caller can pass in a jQuery.Event object, Object, or just an event type string - event = event[ jQuery.expando ] ? 
- event : - new jQuery.Event( type, typeof event === "object" && event ); - - // Trigger bitmask: & 1 for native handlers; & 2 for jQuery (always true) - event.isTrigger = onlyHandlers ? 2 : 3; - event.namespace = namespaces.join( "." ); - event.rnamespace = event.namespace ? - new RegExp( "(^|\\.)" + namespaces.join( "\\.(?:.*\\.|)" ) + "(\\.|$)" ) : - null; - - // Clean up the event in case it is being reused - event.result = undefined; - if ( !event.target ) { - event.target = elem; - } - - // Clone any incoming data and prepend the event, creating the handler arg list - data = data == null ? - [ event ] : - jQuery.makeArray( data, [ event ] ); - - // Allow special events to draw outside the lines - special = jQuery.event.special[ type ] || {}; - if ( !onlyHandlers && special.trigger && special.trigger.apply( elem, data ) === false ) { - return; - } - - // Determine event propagation path in advance, per W3C events spec (#9951) - // Bubble up to document, then to window; watch for a global ownerDocument var (#9724) - if ( !onlyHandlers && !special.noBubble && !isWindow( elem ) ) { - - bubbleType = special.delegateType || type; - if ( !rfocusMorph.test( bubbleType + type ) ) { - cur = cur.parentNode; - } - for ( ; cur; cur = cur.parentNode ) { - eventPath.push( cur ); - tmp = cur; - } - - // Only add window if we got to document (e.g., not plain obj or detached DOM) - if ( tmp === ( elem.ownerDocument || document ) ) { - eventPath.push( tmp.defaultView || tmp.parentWindow || window ); - } - } - - // Fire handlers on the event path - i = 0; - while ( ( cur = eventPath[ i++ ] ) && !event.isPropagationStopped() ) { - lastElement = cur; - event.type = i > 1 ? - bubbleType : - special.bindType || type; - - // jQuery handler - handle = ( - dataPriv.get( cur, "events" ) || Object.create( null ) - )[ event.type ] && - dataPriv.get( cur, "handle" ); - if ( handle ) { - handle.apply( cur, data ); - } - - // Native handler - handle = ontype && cur[ ontype ]; - if ( handle && handle.apply && acceptData( cur ) ) { - event.result = handle.apply( cur, data ); - if ( event.result === false ) { - event.preventDefault(); - } - } - } - event.type = type; - - // If nobody prevented the default action, do it now - if ( !onlyHandlers && !event.isDefaultPrevented() ) { - - if ( ( !special._default || - special._default.apply( eventPath.pop(), data ) === false ) && - acceptData( elem ) ) { - - // Call a native DOM method on the target with the same name as the event. 
- // Don't do default actions on window, that's where global variables be (#6170) - if ( ontype && isFunction( elem[ type ] ) && !isWindow( elem ) ) { - - // Don't re-trigger an onFOO event when we call its FOO() method - tmp = elem[ ontype ]; - - if ( tmp ) { - elem[ ontype ] = null; - } - - // Prevent re-triggering of the same event, since we already bubbled it above - jQuery.event.triggered = type; - - if ( event.isPropagationStopped() ) { - lastElement.addEventListener( type, stopPropagationCallback ); - } - - elem[ type ](); - - if ( event.isPropagationStopped() ) { - lastElement.removeEventListener( type, stopPropagationCallback ); - } - - jQuery.event.triggered = undefined; - - if ( tmp ) { - elem[ ontype ] = tmp; - } - } - } - } - - return event.result; - }, - - // Piggyback on a donor event to simulate a different one - // Used only for `focus(in | out)` events - simulate: function( type, elem, event ) { - var e = jQuery.extend( - new jQuery.Event(), - event, - { - type: type, - isSimulated: true - } - ); - - jQuery.event.trigger( e, null, elem ); - } - -} ); - -jQuery.fn.extend( { - - trigger: function( type, data ) { - return this.each( function() { - jQuery.event.trigger( type, data, this ); - } ); - }, - triggerHandler: function( type, data ) { - var elem = this[ 0 ]; - if ( elem ) { - return jQuery.event.trigger( type, data, elem, true ); - } - } -} ); - - -// Support: Firefox <=44 -// Firefox doesn't have focus(in | out) events -// Related ticket - https://bugzilla.mozilla.org/show_bug.cgi?id=687787 -// -// Support: Chrome <=48 - 49, Safari <=9.0 - 9.1 -// focus(in | out) events fire after focus & blur events, -// which is spec violation - http://www.w3.org/TR/DOM-Level-3-Events/#events-focusevent-event-order -// Related ticket - https://bugs.chromium.org/p/chromium/issues/detail?id=449857 -if ( !support.focusin ) { - jQuery.each( { focus: "focusin", blur: "focusout" }, function( orig, fix ) { - - // Attach a single capturing handler on the document while someone wants focusin/focusout - var handler = function( event ) { - jQuery.event.simulate( fix, event.target, jQuery.event.fix( event ) ); - }; - - jQuery.event.special[ fix ] = { - setup: function() { - - // Handle: regular nodes (via `this.ownerDocument`), window - // (via `this.document`) & document (via `this`). - var doc = this.ownerDocument || this.document || this, - attaches = dataPriv.access( doc, fix ); - - if ( !attaches ) { - doc.addEventListener( orig, handler, true ); - } - dataPriv.access( doc, fix, ( attaches || 0 ) + 1 ); - }, - teardown: function() { - var doc = this.ownerDocument || this.document || this, - attaches = dataPriv.access( doc, fix ) - 1; - - if ( !attaches ) { - doc.removeEventListener( orig, handler, true ); - dataPriv.remove( doc, fix ); - - } else { - dataPriv.access( doc, fix, attaches ); - } - } - }; - } ); -} -var location = window.location; - -var nonce = { guid: Date.now() }; - -var rquery = ( /\?/ ); - - - -// Cross-browser xml parsing -jQuery.parseXML = function( data ) { - var xml; - if ( !data || typeof data !== "string" ) { - return null; - } - - // Support: IE 9 - 11 only - // IE throws on parseFromString with invalid input. 
- try { - xml = ( new window.DOMParser() ).parseFromString( data, "text/xml" ); - } catch ( e ) { - xml = undefined; - } - - if ( !xml || xml.getElementsByTagName( "parsererror" ).length ) { - jQuery.error( "Invalid XML: " + data ); - } - return xml; -}; - - -var - rbracket = /\[\]$/, - rCRLF = /\r?\n/g, - rsubmitterTypes = /^(?:submit|button|image|reset|file)$/i, - rsubmittable = /^(?:input|select|textarea|keygen)/i; - -function buildParams( prefix, obj, traditional, add ) { - var name; - - if ( Array.isArray( obj ) ) { - - // Serialize array item. - jQuery.each( obj, function( i, v ) { - if ( traditional || rbracket.test( prefix ) ) { - - // Treat each array item as a scalar. - add( prefix, v ); - - } else { - - // Item is non-scalar (array or object), encode its numeric index. - buildParams( - prefix + "[" + ( typeof v === "object" && v != null ? i : "" ) + "]", - v, - traditional, - add - ); - } - } ); - - } else if ( !traditional && toType( obj ) === "object" ) { - - // Serialize object item. - for ( name in obj ) { - buildParams( prefix + "[" + name + "]", obj[ name ], traditional, add ); - } - - } else { - - // Serialize scalar item. - add( prefix, obj ); - } -} - -// Serialize an array of form elements or a set of -// key/values into a query string -jQuery.param = function( a, traditional ) { - var prefix, - s = [], - add = function( key, valueOrFunction ) { - - // If value is a function, invoke it and use its return value - var value = isFunction( valueOrFunction ) ? - valueOrFunction() : - valueOrFunction; - - s[ s.length ] = encodeURIComponent( key ) + "=" + - encodeURIComponent( value == null ? "" : value ); - }; - - if ( a == null ) { - return ""; - } - - // If an array was passed in, assume that it is an array of form elements. - if ( Array.isArray( a ) || ( a.jquery && !jQuery.isPlainObject( a ) ) ) { - - // Serialize the form elements - jQuery.each( a, function() { - add( this.name, this.value ); - } ); - - } else { - - // If traditional, encode the "old" way (the way 1.3.2 or older - // did it), otherwise encode params recursively. - for ( prefix in a ) { - buildParams( prefix, a[ prefix ], traditional, add ); - } - } - - // Return the resulting serialization - return s.join( "&" ); -}; - -jQuery.fn.extend( { - serialize: function() { - return jQuery.param( this.serializeArray() ); - }, - serializeArray: function() { - return this.map( function() { - - // Can add propHook for "elements" to filter or add form elements - var elements = jQuery.prop( this, "elements" ); - return elements ? 
jQuery.makeArray( elements ) : this; - } ) - .filter( function() { - var type = this.type; - - // Use .is( ":disabled" ) so that fieldset[disabled] works - return this.name && !jQuery( this ).is( ":disabled" ) && - rsubmittable.test( this.nodeName ) && !rsubmitterTypes.test( type ) && - ( this.checked || !rcheckableType.test( type ) ); - } ) - .map( function( _i, elem ) { - var val = jQuery( this ).val(); - - if ( val == null ) { - return null; - } - - if ( Array.isArray( val ) ) { - return jQuery.map( val, function( val ) { - return { name: elem.name, value: val.replace( rCRLF, "\r\n" ) }; - } ); - } - - return { name: elem.name, value: val.replace( rCRLF, "\r\n" ) }; - } ).get(); - } -} ); - - -var - r20 = /%20/g, - rhash = /#.*$/, - rantiCache = /([?&])_=[^&]*/, - rheaders = /^(.*?):[ \t]*([^\r\n]*)$/mg, - - // #7653, #8125, #8152: local protocol detection - rlocalProtocol = /^(?:about|app|app-storage|.+-extension|file|res|widget):$/, - rnoContent = /^(?:GET|HEAD)$/, - rprotocol = /^\/\//, - - /* Prefilters - * 1) They are useful to introduce custom dataTypes (see ajax/jsonp.js for an example) - * 2) These are called: - * - BEFORE asking for a transport - * - AFTER param serialization (s.data is a string if s.processData is true) - * 3) key is the dataType - * 4) the catchall symbol "*" can be used - * 5) execution will start with transport dataType and THEN continue down to "*" if needed - */ - prefilters = {}, - - /* Transports bindings - * 1) key is the dataType - * 2) the catchall symbol "*" can be used - * 3) selection will start with transport dataType and THEN go to "*" if needed - */ - transports = {}, - - // Avoid comment-prolog char sequence (#10098); must appease lint and evade compression - allTypes = "*/".concat( "*" ), - - // Anchor tag for parsing the document origin - originAnchor = document.createElement( "a" ); - originAnchor.href = location.href; - -// Base "constructor" for jQuery.ajaxPrefilter and jQuery.ajaxTransport -function addToPrefiltersOrTransports( structure ) { - - // dataTypeExpression is optional and defaults to "*" - return function( dataTypeExpression, func ) { - - if ( typeof dataTypeExpression !== "string" ) { - func = dataTypeExpression; - dataTypeExpression = "*"; - } - - var dataType, - i = 0, - dataTypes = dataTypeExpression.toLowerCase().match( rnothtmlwhite ) || []; - - if ( isFunction( func ) ) { - - // For each dataType in the dataTypeExpression - while ( ( dataType = dataTypes[ i++ ] ) ) { - - // Prepend if requested - if ( dataType[ 0 ] === "+" ) { - dataType = dataType.slice( 1 ) || "*"; - ( structure[ dataType ] = structure[ dataType ] || [] ).unshift( func ); - - // Otherwise append - } else { - ( structure[ dataType ] = structure[ dataType ] || [] ).push( func ); - } - } - } - }; -} - -// Base inspection function for prefilters and transports -function inspectPrefiltersOrTransports( structure, options, originalOptions, jqXHR ) { - - var inspected = {}, - seekingTransport = ( structure === transports ); - - function inspect( dataType ) { - var selected; - inspected[ dataType ] = true; - jQuery.each( structure[ dataType ] || [], function( _, prefilterOrFactory ) { - var dataTypeOrTransport = prefilterOrFactory( options, originalOptions, jqXHR ); - if ( typeof dataTypeOrTransport === "string" && - !seekingTransport && !inspected[ dataTypeOrTransport ] ) { - - options.dataTypes.unshift( dataTypeOrTransport ); - inspect( dataTypeOrTransport ); - return false; - } else if ( seekingTransport ) { - return !( selected = dataTypeOrTransport ); - } 
- } ); - return selected; - } - - return inspect( options.dataTypes[ 0 ] ) || !inspected[ "*" ] && inspect( "*" ); -} - -// A special extend for ajax options -// that takes "flat" options (not to be deep extended) -// Fixes #9887 -function ajaxExtend( target, src ) { - var key, deep, - flatOptions = jQuery.ajaxSettings.flatOptions || {}; - - for ( key in src ) { - if ( src[ key ] !== undefined ) { - ( flatOptions[ key ] ? target : ( deep || ( deep = {} ) ) )[ key ] = src[ key ]; - } - } - if ( deep ) { - jQuery.extend( true, target, deep ); - } - - return target; -} - -/* Handles responses to an ajax request: - * - finds the right dataType (mediates between content-type and expected dataType) - * - returns the corresponding response - */ -function ajaxHandleResponses( s, jqXHR, responses ) { - - var ct, type, finalDataType, firstDataType, - contents = s.contents, - dataTypes = s.dataTypes; - - // Remove auto dataType and get content-type in the process - while ( dataTypes[ 0 ] === "*" ) { - dataTypes.shift(); - if ( ct === undefined ) { - ct = s.mimeType || jqXHR.getResponseHeader( "Content-Type" ); - } - } - - // Check if we're dealing with a known content-type - if ( ct ) { - for ( type in contents ) { - if ( contents[ type ] && contents[ type ].test( ct ) ) { - dataTypes.unshift( type ); - break; - } - } - } - - // Check to see if we have a response for the expected dataType - if ( dataTypes[ 0 ] in responses ) { - finalDataType = dataTypes[ 0 ]; - } else { - - // Try convertible dataTypes - for ( type in responses ) { - if ( !dataTypes[ 0 ] || s.converters[ type + " " + dataTypes[ 0 ] ] ) { - finalDataType = type; - break; - } - if ( !firstDataType ) { - firstDataType = type; - } - } - - // Or just use first one - finalDataType = finalDataType || firstDataType; - } - - // If we found a dataType - // We add the dataType to the list if needed - // and return the corresponding response - if ( finalDataType ) { - if ( finalDataType !== dataTypes[ 0 ] ) { - dataTypes.unshift( finalDataType ); - } - return responses[ finalDataType ]; - } -} - -/* Chain conversions given the request and the original response - * Also sets the responseXXX fields on the jqXHR instance - */ -function ajaxConvert( s, response, jqXHR, isSuccess ) { - var conv2, current, conv, tmp, prev, - converters = {}, - - // Work with a copy of dataTypes in case we need to modify it for conversion - dataTypes = s.dataTypes.slice(); - - // Create converters map with lowercased keys - if ( dataTypes[ 1 ] ) { - for ( conv in s.converters ) { - converters[ conv.toLowerCase() ] = s.converters[ conv ]; - } - } - - current = dataTypes.shift(); - - // Convert to each sequential dataType - while ( current ) { - - if ( s.responseFields[ current ] ) { - jqXHR[ s.responseFields[ current ] ] = response; - } - - // Apply the dataFilter if provided - if ( !prev && isSuccess && s.dataFilter ) { - response = s.dataFilter( response, s.dataType ); - } - - prev = current; - current = dataTypes.shift(); - - if ( current ) { - - // There's only work to do if current dataType is non-auto - if ( current === "*" ) { - - current = prev; - - // Convert response if prev dataType is non-auto and differs from current - } else if ( prev !== "*" && prev !== current ) { - - // Seek a direct converter - conv = converters[ prev + " " + current ] || converters[ "* " + current ]; - - // If none found, seek a pair - if ( !conv ) { - for ( conv2 in converters ) { - - // If conv2 outputs current - tmp = conv2.split( " " ); - if ( tmp[ 1 ] === current ) { - - // If 
prev can be converted to accepted input - conv = converters[ prev + " " + tmp[ 0 ] ] || - converters[ "* " + tmp[ 0 ] ]; - if ( conv ) { - - // Condense equivalence converters - if ( conv === true ) { - conv = converters[ conv2 ]; - - // Otherwise, insert the intermediate dataType - } else if ( converters[ conv2 ] !== true ) { - current = tmp[ 0 ]; - dataTypes.unshift( tmp[ 1 ] ); - } - break; - } - } - } - } - - // Apply converter (if not an equivalence) - if ( conv !== true ) { - - // Unless errors are allowed to bubble, catch and return them - if ( conv && s.throws ) { - response = conv( response ); - } else { - try { - response = conv( response ); - } catch ( e ) { - return { - state: "parsererror", - error: conv ? e : "No conversion from " + prev + " to " + current - }; - } - } - } - } - } - } - - return { state: "success", data: response }; -} - -jQuery.extend( { - - // Counter for holding the number of active queries - active: 0, - - // Last-Modified header cache for next request - lastModified: {}, - etag: {}, - - ajaxSettings: { - url: location.href, - type: "GET", - isLocal: rlocalProtocol.test( location.protocol ), - global: true, - processData: true, - async: true, - contentType: "application/x-www-form-urlencoded; charset=UTF-8", - - /* - timeout: 0, - data: null, - dataType: null, - username: null, - password: null, - cache: null, - throws: false, - traditional: false, - headers: {}, - */ - - accepts: { - "*": allTypes, - text: "text/plain", - html: "text/html", - xml: "application/xml, text/xml", - json: "application/json, text/javascript" - }, - - contents: { - xml: /\bxml\b/, - html: /\bhtml/, - json: /\bjson\b/ - }, - - responseFields: { - xml: "responseXML", - text: "responseText", - json: "responseJSON" - }, - - // Data converters - // Keys separate source (or catchall "*") and destination types with a single space - converters: { - - // Convert anything to text - "* text": String, - - // Text to html (true = no transformation) - "text html": true, - - // Evaluate text as a json expression - "text json": JSON.parse, - - // Parse text as xml - "text xml": jQuery.parseXML - }, - - // For options that shouldn't be deep extended: - // you can add your own custom options here if - // and when you create one that shouldn't be - // deep extended (see ajaxExtend) - flatOptions: { - url: true, - context: true - } - }, - - // Creates a full fledged settings object into target - // with both ajaxSettings and settings fields. - // If target is omitted, writes into ajaxSettings. - ajaxSetup: function( target, settings ) { - return settings ? 
- - // Building a settings object - ajaxExtend( ajaxExtend( target, jQuery.ajaxSettings ), settings ) : - - // Extending ajaxSettings - ajaxExtend( jQuery.ajaxSettings, target ); - }, - - ajaxPrefilter: addToPrefiltersOrTransports( prefilters ), - ajaxTransport: addToPrefiltersOrTransports( transports ), - - // Main method - ajax: function( url, options ) { - - // If url is an object, simulate pre-1.5 signature - if ( typeof url === "object" ) { - options = url; - url = undefined; - } - - // Force options to be an object - options = options || {}; - - var transport, - - // URL without anti-cache param - cacheURL, - - // Response headers - responseHeadersString, - responseHeaders, - - // timeout handle - timeoutTimer, - - // Url cleanup var - urlAnchor, - - // Request state (becomes false upon send and true upon completion) - completed, - - // To know if global events are to be dispatched - fireGlobals, - - // Loop variable - i, - - // uncached part of the url - uncached, - - // Create the final options object - s = jQuery.ajaxSetup( {}, options ), - - // Callbacks context - callbackContext = s.context || s, - - // Context for global events is callbackContext if it is a DOM node or jQuery collection - globalEventContext = s.context && - ( callbackContext.nodeType || callbackContext.jquery ) ? - jQuery( callbackContext ) : - jQuery.event, - - // Deferreds - deferred = jQuery.Deferred(), - completeDeferred = jQuery.Callbacks( "once memory" ), - - // Status-dependent callbacks - statusCode = s.statusCode || {}, - - // Headers (they are sent all at once) - requestHeaders = {}, - requestHeadersNames = {}, - - // Default abort message - strAbort = "canceled", - - // Fake xhr - jqXHR = { - readyState: 0, - - // Builds headers hashtable if needed - getResponseHeader: function( key ) { - var match; - if ( completed ) { - if ( !responseHeaders ) { - responseHeaders = {}; - while ( ( match = rheaders.exec( responseHeadersString ) ) ) { - responseHeaders[ match[ 1 ].toLowerCase() + " " ] = - ( responseHeaders[ match[ 1 ].toLowerCase() + " " ] || [] ) - .concat( match[ 2 ] ); - } - } - match = responseHeaders[ key.toLowerCase() + " " ]; - } - return match == null ? null : match.join( ", " ); - }, - - // Raw string - getAllResponseHeaders: function() { - return completed ? 
responseHeadersString : null; - }, - - // Caches the header - setRequestHeader: function( name, value ) { - if ( completed == null ) { - name = requestHeadersNames[ name.toLowerCase() ] = - requestHeadersNames[ name.toLowerCase() ] || name; - requestHeaders[ name ] = value; - } - return this; - }, - - // Overrides response content-type header - overrideMimeType: function( type ) { - if ( completed == null ) { - s.mimeType = type; - } - return this; - }, - - // Status-dependent callbacks - statusCode: function( map ) { - var code; - if ( map ) { - if ( completed ) { - - // Execute the appropriate callbacks - jqXHR.always( map[ jqXHR.status ] ); - } else { - - // Lazy-add the new callbacks in a way that preserves old ones - for ( code in map ) { - statusCode[ code ] = [ statusCode[ code ], map[ code ] ]; - } - } - } - return this; - }, - - // Cancel the request - abort: function( statusText ) { - var finalText = statusText || strAbort; - if ( transport ) { - transport.abort( finalText ); - } - done( 0, finalText ); - return this; - } - }; - - // Attach deferreds - deferred.promise( jqXHR ); - - // Add protocol if not provided (prefilters might expect it) - // Handle falsy url in the settings object (#10093: consistency with old signature) - // We also use the url parameter if available - s.url = ( ( url || s.url || location.href ) + "" ) - .replace( rprotocol, location.protocol + "//" ); - - // Alias method option to type as per ticket #12004 - s.type = options.method || options.type || s.method || s.type; - - // Extract dataTypes list - s.dataTypes = ( s.dataType || "*" ).toLowerCase().match( rnothtmlwhite ) || [ "" ]; - - // A cross-domain request is in order when the origin doesn't match the current origin. - if ( s.crossDomain == null ) { - urlAnchor = document.createElement( "a" ); - - // Support: IE <=8 - 11, Edge 12 - 15 - // IE throws exception on accessing the href property if url is malformed, - // e.g. 
http://example.com:80x/ - try { - urlAnchor.href = s.url; - - // Support: IE <=8 - 11 only - // Anchor's host property isn't correctly set when s.url is relative - urlAnchor.href = urlAnchor.href; - s.crossDomain = originAnchor.protocol + "//" + originAnchor.host !== - urlAnchor.protocol + "//" + urlAnchor.host; - } catch ( e ) { - - // If there is an error parsing the URL, assume it is crossDomain, - // it can be rejected by the transport if it is invalid - s.crossDomain = true; - } - } - - // Convert data if not already a string - if ( s.data && s.processData && typeof s.data !== "string" ) { - s.data = jQuery.param( s.data, s.traditional ); - } - - // Apply prefilters - inspectPrefiltersOrTransports( prefilters, s, options, jqXHR ); - - // If request was aborted inside a prefilter, stop there - if ( completed ) { - return jqXHR; - } - - // We can fire global events as of now if asked to - // Don't fire events if jQuery.event is undefined in an AMD-usage scenario (#15118) - fireGlobals = jQuery.event && s.global; - - // Watch for a new set of requests - if ( fireGlobals && jQuery.active++ === 0 ) { - jQuery.event.trigger( "ajaxStart" ); - } - - // Uppercase the type - s.type = s.type.toUpperCase(); - - // Determine if request has content - s.hasContent = !rnoContent.test( s.type ); - - // Save the URL in case we're toying with the If-Modified-Since - // and/or If-None-Match header later on - // Remove hash to simplify url manipulation - cacheURL = s.url.replace( rhash, "" ); - - // More options handling for requests with no content - if ( !s.hasContent ) { - - // Remember the hash so we can put it back - uncached = s.url.slice( cacheURL.length ); - - // If data is available and should be processed, append data to url - if ( s.data && ( s.processData || typeof s.data === "string" ) ) { - cacheURL += ( rquery.test( cacheURL ) ? "&" : "?" ) + s.data; - - // #9682: remove data so that it's not used in an eventual retry - delete s.data; - } - - // Add or update anti-cache param if needed - if ( s.cache === false ) { - cacheURL = cacheURL.replace( rantiCache, "$1" ); - uncached = ( rquery.test( cacheURL ) ? "&" : "?" ) + "_=" + ( nonce.guid++ ) + - uncached; - } - - // Put hash and anti-cache on the URL that will be requested (gh-1732) - s.url = cacheURL + uncached; - - // Change '%20' to '+' if this is encoded form body content (gh-2658) - } else if ( s.data && s.processData && - ( s.contentType || "" ).indexOf( "application/x-www-form-urlencoded" ) === 0 ) { - s.data = s.data.replace( r20, "+" ); - } - - // Set the If-Modified-Since and/or If-None-Match header, if in ifModified mode. - if ( s.ifModified ) { - if ( jQuery.lastModified[ cacheURL ] ) { - jqXHR.setRequestHeader( "If-Modified-Since", jQuery.lastModified[ cacheURL ] ); - } - if ( jQuery.etag[ cacheURL ] ) { - jqXHR.setRequestHeader( "If-None-Match", jQuery.etag[ cacheURL ] ); - } - } - - // Set the correct header, if data is being sent - if ( s.data && s.hasContent && s.contentType !== false || options.contentType ) { - jqXHR.setRequestHeader( "Content-Type", s.contentType ); - } - - // Set the Accepts header for the server, depending on the dataType - jqXHR.setRequestHeader( - "Accept", - s.dataTypes[ 0 ] && s.accepts[ s.dataTypes[ 0 ] ] ? - s.accepts[ s.dataTypes[ 0 ] ] + - ( s.dataTypes[ 0 ] !== "*" ? 
", " + allTypes + "; q=0.01" : "" ) : - s.accepts[ "*" ] - ); - - // Check for headers option - for ( i in s.headers ) { - jqXHR.setRequestHeader( i, s.headers[ i ] ); - } - - // Allow custom headers/mimetypes and early abort - if ( s.beforeSend && - ( s.beforeSend.call( callbackContext, jqXHR, s ) === false || completed ) ) { - - // Abort if not done already and return - return jqXHR.abort(); - } - - // Aborting is no longer a cancellation - strAbort = "abort"; - - // Install callbacks on deferreds - completeDeferred.add( s.complete ); - jqXHR.done( s.success ); - jqXHR.fail( s.error ); - - // Get transport - transport = inspectPrefiltersOrTransports( transports, s, options, jqXHR ); - - // If no transport, we auto-abort - if ( !transport ) { - done( -1, "No Transport" ); - } else { - jqXHR.readyState = 1; - - // Send global event - if ( fireGlobals ) { - globalEventContext.trigger( "ajaxSend", [ jqXHR, s ] ); - } - - // If request was aborted inside ajaxSend, stop there - if ( completed ) { - return jqXHR; - } - - // Timeout - if ( s.async && s.timeout > 0 ) { - timeoutTimer = window.setTimeout( function() { - jqXHR.abort( "timeout" ); - }, s.timeout ); - } - - try { - completed = false; - transport.send( requestHeaders, done ); - } catch ( e ) { - - // Rethrow post-completion exceptions - if ( completed ) { - throw e; - } - - // Propagate others as results - done( -1, e ); - } - } - - // Callback for when everything is done - function done( status, nativeStatusText, responses, headers ) { - var isSuccess, success, error, response, modified, - statusText = nativeStatusText; - - // Ignore repeat invocations - if ( completed ) { - return; - } - - completed = true; - - // Clear timeout if it exists - if ( timeoutTimer ) { - window.clearTimeout( timeoutTimer ); - } - - // Dereference transport for early garbage collection - // (no matter how long the jqXHR object will be used) - transport = undefined; - - // Cache response headers - responseHeadersString = headers || ""; - - // Set readyState - jqXHR.readyState = status > 0 ? 4 : 0; - - // Determine if successful - isSuccess = status >= 200 && status < 300 || status === 304; - - // Get response data - if ( responses ) { - response = ajaxHandleResponses( s, jqXHR, responses ); - } - - // Use a noop converter for missing script - if ( !isSuccess && jQuery.inArray( "script", s.dataTypes ) > -1 ) { - s.converters[ "text script" ] = function() {}; - } - - // Convert no matter what (that way responseXXX fields are always set) - response = ajaxConvert( s, response, jqXHR, isSuccess ); - - // If successful, handle type chaining - if ( isSuccess ) { - - // Set the If-Modified-Since and/or If-None-Match header, if in ifModified mode. 
- if ( s.ifModified ) { - modified = jqXHR.getResponseHeader( "Last-Modified" ); - if ( modified ) { - jQuery.lastModified[ cacheURL ] = modified; - } - modified = jqXHR.getResponseHeader( "etag" ); - if ( modified ) { - jQuery.etag[ cacheURL ] = modified; - } - } - - // if no content - if ( status === 204 || s.type === "HEAD" ) { - statusText = "nocontent"; - - // if not modified - } else if ( status === 304 ) { - statusText = "notmodified"; - - // If we have data, let's convert it - } else { - statusText = response.state; - success = response.data; - error = response.error; - isSuccess = !error; - } - } else { - - // Extract error from statusText and normalize for non-aborts - error = statusText; - if ( status || !statusText ) { - statusText = "error"; - if ( status < 0 ) { - status = 0; - } - } - } - - // Set data for the fake xhr object - jqXHR.status = status; - jqXHR.statusText = ( nativeStatusText || statusText ) + ""; - - // Success/Error - if ( isSuccess ) { - deferred.resolveWith( callbackContext, [ success, statusText, jqXHR ] ); - } else { - deferred.rejectWith( callbackContext, [ jqXHR, statusText, error ] ); - } - - // Status-dependent callbacks - jqXHR.statusCode( statusCode ); - statusCode = undefined; - - if ( fireGlobals ) { - globalEventContext.trigger( isSuccess ? "ajaxSuccess" : "ajaxError", - [ jqXHR, s, isSuccess ? success : error ] ); - } - - // Complete - completeDeferred.fireWith( callbackContext, [ jqXHR, statusText ] ); - - if ( fireGlobals ) { - globalEventContext.trigger( "ajaxComplete", [ jqXHR, s ] ); - - // Handle the global AJAX counter - if ( !( --jQuery.active ) ) { - jQuery.event.trigger( "ajaxStop" ); - } - } - } - - return jqXHR; - }, - - getJSON: function( url, data, callback ) { - return jQuery.get( url, data, callback, "json" ); - }, - - getScript: function( url, callback ) { - return jQuery.get( url, undefined, callback, "script" ); - } -} ); - -jQuery.each( [ "get", "post" ], function( _i, method ) { - jQuery[ method ] = function( url, data, callback, type ) { - - // Shift arguments if data argument was omitted - if ( isFunction( data ) ) { - type = type || callback; - callback = data; - data = undefined; - } - - // The url can be an options object (which then must have .url) - return jQuery.ajax( jQuery.extend( { - url: url, - type: method, - dataType: type, - data: data, - success: callback - }, jQuery.isPlainObject( url ) && url ) ); - }; -} ); - -jQuery.ajaxPrefilter( function( s ) { - var i; - for ( i in s.headers ) { - if ( i.toLowerCase() === "content-type" ) { - s.contentType = s.headers[ i ] || ""; - } - } -} ); - - -jQuery._evalUrl = function( url, options, doc ) { - return jQuery.ajax( { - url: url, - - // Make this explicit, since user can override this through ajaxSetup (#11264) - type: "GET", - dataType: "script", - cache: true, - async: false, - global: false, - - // Only evaluate the response if it is successful (gh-4126) - // dataFilter is not invoked for failure responses, so using it instead - // of the default converter is kludgy but it works. 
- converters: { - "text script": function() {} - }, - dataFilter: function( response ) { - jQuery.globalEval( response, options, doc ); - } - } ); -}; - - -jQuery.fn.extend( { - wrapAll: function( html ) { - var wrap; - - if ( this[ 0 ] ) { - if ( isFunction( html ) ) { - html = html.call( this[ 0 ] ); - } - - // The elements to wrap the target around - wrap = jQuery( html, this[ 0 ].ownerDocument ).eq( 0 ).clone( true ); - - if ( this[ 0 ].parentNode ) { - wrap.insertBefore( this[ 0 ] ); - } - - wrap.map( function() { - var elem = this; - - while ( elem.firstElementChild ) { - elem = elem.firstElementChild; - } - - return elem; - } ).append( this ); - } - - return this; - }, - - wrapInner: function( html ) { - if ( isFunction( html ) ) { - return this.each( function( i ) { - jQuery( this ).wrapInner( html.call( this, i ) ); - } ); - } - - return this.each( function() { - var self = jQuery( this ), - contents = self.contents(); - - if ( contents.length ) { - contents.wrapAll( html ); - - } else { - self.append( html ); - } - } ); - }, - - wrap: function( html ) { - var htmlIsFunction = isFunction( html ); - - return this.each( function( i ) { - jQuery( this ).wrapAll( htmlIsFunction ? html.call( this, i ) : html ); - } ); - }, - - unwrap: function( selector ) { - this.parent( selector ).not( "body" ).each( function() { - jQuery( this ).replaceWith( this.childNodes ); - } ); - return this; - } -} ); - - -jQuery.expr.pseudos.hidden = function( elem ) { - return !jQuery.expr.pseudos.visible( elem ); -}; -jQuery.expr.pseudos.visible = function( elem ) { - return !!( elem.offsetWidth || elem.offsetHeight || elem.getClientRects().length ); -}; - - - - -jQuery.ajaxSettings.xhr = function() { - try { - return new window.XMLHttpRequest(); - } catch ( e ) {} -}; - -var xhrSuccessStatus = { - - // File protocol always yields status code 0, assume 200 - 0: 200, - - // Support: IE <=9 only - // #1450: sometimes IE returns 1223 when it should be 204 - 1223: 204 - }, - xhrSupported = jQuery.ajaxSettings.xhr(); - -support.cors = !!xhrSupported && ( "withCredentials" in xhrSupported ); -support.ajax = xhrSupported = !!xhrSupported; - -jQuery.ajaxTransport( function( options ) { - var callback, errorCallback; - - // Cross domain only allowed if supported through XMLHttpRequest - if ( support.cors || xhrSupported && !options.crossDomain ) { - return { - send: function( headers, complete ) { - var i, - xhr = options.xhr(); - - xhr.open( - options.type, - options.url, - options.async, - options.username, - options.password - ); - - // Apply custom fields if provided - if ( options.xhrFields ) { - for ( i in options.xhrFields ) { - xhr[ i ] = options.xhrFields[ i ]; - } - } - - // Override mime type if needed - if ( options.mimeType && xhr.overrideMimeType ) { - xhr.overrideMimeType( options.mimeType ); - } - - // X-Requested-With header - // For cross-domain requests, seeing as conditions for a preflight are - // akin to a jigsaw puzzle, we simply never set it to be sure. - // (it can always be set on a per-request basis or even using ajaxSetup) - // For same-domain requests, won't change header if already provided. 
- if ( !options.crossDomain && !headers[ "X-Requested-With" ] ) { - headers[ "X-Requested-With" ] = "XMLHttpRequest"; - } - - // Set headers - for ( i in headers ) { - xhr.setRequestHeader( i, headers[ i ] ); - } - - // Callback - callback = function( type ) { - return function() { - if ( callback ) { - callback = errorCallback = xhr.onload = - xhr.onerror = xhr.onabort = xhr.ontimeout = - xhr.onreadystatechange = null; - - if ( type === "abort" ) { - xhr.abort(); - } else if ( type === "error" ) { - - // Support: IE <=9 only - // On a manual native abort, IE9 throws - // errors on any property access that is not readyState - if ( typeof xhr.status !== "number" ) { - complete( 0, "error" ); - } else { - complete( - - // File: protocol always yields status 0; see #8605, #14207 - xhr.status, - xhr.statusText - ); - } - } else { - complete( - xhrSuccessStatus[ xhr.status ] || xhr.status, - xhr.statusText, - - // Support: IE <=9 only - // IE9 has no XHR2 but throws on binary (trac-11426) - // For XHR2 non-text, let the caller handle it (gh-2498) - ( xhr.responseType || "text" ) !== "text" || - typeof xhr.responseText !== "string" ? - { binary: xhr.response } : - { text: xhr.responseText }, - xhr.getAllResponseHeaders() - ); - } - } - }; - }; - - // Listen to events - xhr.onload = callback(); - errorCallback = xhr.onerror = xhr.ontimeout = callback( "error" ); - - // Support: IE 9 only - // Use onreadystatechange to replace onabort - // to handle uncaught aborts - if ( xhr.onabort !== undefined ) { - xhr.onabort = errorCallback; - } else { - xhr.onreadystatechange = function() { - - // Check readyState before timeout as it changes - if ( xhr.readyState === 4 ) { - - // Allow onerror to be called first, - // but that will not handle a native abort - // Also, save errorCallback to a variable - // as xhr.onerror cannot be accessed - window.setTimeout( function() { - if ( callback ) { - errorCallback(); - } - } ); - } - }; - } - - // Create the abort callback - callback = callback( "abort" ); - - try { - - // Do send the request (this may raise an exception) - xhr.send( options.hasContent && options.data || null ); - } catch ( e ) { - - // #14683: Only rethrow if this hasn't been notified as an error yet - if ( callback ) { - throw e; - } - } - }, - - abort: function() { - if ( callback ) { - callback(); - } - } - }; - } -} ); - - - - -// Prevent auto-execution of scripts when no explicit dataType was provided (See gh-2432) -jQuery.ajaxPrefilter( function( s ) { - if ( s.crossDomain ) { - s.contents.script = false; - } -} ); - -// Install script dataType -jQuery.ajaxSetup( { - accepts: { - script: "text/javascript, application/javascript, " + - "application/ecmascript, application/x-ecmascript" - }, - contents: { - script: /\b(?:java|ecma)script\b/ - }, - converters: { - "text script": function( text ) { - jQuery.globalEval( text ); - return text; - } - } -} ); - -// Handle cache's special case and crossDomain -jQuery.ajaxPrefilter( "script", function( s ) { - if ( s.cache === undefined ) { - s.cache = false; - } - if ( s.crossDomain ) { - s.type = "GET"; - } -} ); - -// Bind script tag hack transport -jQuery.ajaxTransport( "script", function( s ) { - - // This transport only deals with cross domain or forced-by-attrs requests - if ( s.crossDomain || s.scriptAttrs ) { - var script, callback; - return { - send: function( _, complete ) { - script = jQuery( " - - - - - - - - - - - - - - - - - - -
Analysis

-

Analyse mixer ensembles to extract static insights and train predict-time models for dynamic insights.

-
-
-class analysis.AccStats(deps=('ICP',))[source]
-

Computes accuracy stats and a confusion matrix for the validation dataset

-
-
-analyze(info, **kwargs)[source]
-

This method should be called once during the analysis phase, or not called at all. -It computes any information that the block may either output to the model analysis object, -or use at inference time when .explain() is called (in this case, make sure all needed -objects are added to the runtime analyzer so that .explain() can access them).

-
-
Parameters
-

info (Dict[str, object]) – Dictionary where any new information or objects are added. The next analysis block will use the output of the previous block as a starting point.

kwargs – Dictionary with named variables from either the core analysis or the rest of the prediction pipeline.

-
-
Return type
-

Dict[str, object]

-
-
-
- -
- -
-
-class analysis.BaseAnalysisBlock(deps=())[source]
-

Class to be inherited by any analysis/explainer block.

-
-
-analyze(info, **kwargs)[source]
-

This method should be called once during the analysis phase, or not called at all. -It computes any information that the block may either output to the model analysis object, -or use at inference time when .explain() is called (in this case, make sure all needed -objects are added to the runtime analyzer so that .explain() can access them).

-
-
Parameters
-

info (Dict[str, object]) – Dictionary where any new information or objects are added. The next analysis block will use the output of the previous block as a starting point.

kwargs – Dictionary with named variables from either the core analysis or the rest of the prediction pipeline.

-
-
Return type
-

Dict[str, object]

-
-
-
- -
-
-explain(row_insights, global_insights, **kwargs)[source]
-

This method should be called once during the explaining phase at inference time, or not called at all. -Additional explanations can be at an instance level (row-wise) or global. -For the former, return a data frame with any new insights. For the latter, a dictionary is required.

-
-
Parameters
-
    -
  • row_insights (DataFrame) – dataframe with previously computed row-level explanations.

  • -
  • global_insights (Dict[str, object]) – dict() with any explanations that concern all predicted instances or the model itself.

  • -
-
-
Return type
-

Tuple[DataFrame, Dict[str, object]]

-
-
Returns
-

    -
  • row_insights: modified input dataframe with any new row insights added here.

  • -
  • global_insights: dict() with any explanations that concern all predicted instances or the model itself.

  • -
-

-
-
-
- -
- -
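For orientation, here is a minimal sketch of a user-defined block that follows the analyze()/explain() contract described above. This is not part of Lightwood: the import path, the block name, and the kwargs keys ('data', 'target', 'analysis') are assumptions to verify against the core analyzer before use.

from lightwood.analysis import BaseAnalysisBlock

class TargetSpreadBlock(BaseAnalysisBlock):
    # Hypothetical block: record the target's spread during analysis and
    # surface it again as a global insight when explaining predictions.
    def analyze(self, info, **kwargs):
        # 'data' and 'target' are assumed kwarg names; check the runtime analyzer for the real ones
        info['target_spread'] = float(kwargs['data'][kwargs['target']].std())
        return info

    def explain(self, row_insights, global_insights, **kwargs):
        # row-level insights belong in the dataframe, global ones in the dict
        global_insights['target_spread'] = kwargs['analysis'].get('target_spread')
        return row_insights, global_insights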
-
-class analysis.GlobalFeatureImportance(disable_column_importance)[source]
-

Analysis block that estimates column importance with a variant of the LOCO (leave-one-covariate-out) algorithm.

-
-
Roughly speaking, the procedure:
    -
  • iterates over all input columns

  • -
  • if the input column is optional, generate predictions with its values set to None

  • -
  • compare this accuracy with the accuracy obtained using all data

  • -
  • all accuracy differences are passed through a softmax and reported as estimated column importance scores

  • -
-
-
-

Note that, crucially, this method does not refit the predictor at any point.

-
-
Reference:

https://compstat-lmu.github.io/iml_methods_limitations/pfi.html

-
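The procedure above can be illustrated outside of Lightwood with a few lines of generic Python. This is a toy sketch of the leave-one-covariate-out idea, not the actual GlobalFeatureImportance implementation; predict_fn and acc_fn are stand-ins for a trained predictor and an accuracy function.

import numpy as np

def loco_importances(predict_fn, acc_fn, df, optional_cols):
    # Baseline accuracy with every column present
    base_acc = acc_fn(predict_fn(df))
    drops = []
    for col in optional_cols:
        masked = df.copy()
        masked[col] = None                            # "leave out" the column without refitting
        drops.append(base_acc - acc_fn(predict_fn(masked)))
    drops = np.array(drops)
    scores = np.exp(drops) / np.exp(drops).sum()      # softmax over accuracy differences
    return dict(zip(optional_cols, scores))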
-
-
-
-analyze(info, **kwargs)[source]
-

This method should be called once during the analysis phase, or not called at all. -It computes any information that the block may either output to the model analysis object, -or use at inference time when .explain() is called (in this case, make sure all needed -objects are added to the runtime analyzer so that .explain() can access them).

-
-
Parameters
-

info (Dict[str, object]) – Dictionary where any new information or objects are added. The next analysis block will use the output of the previous block as a starting point.

kwargs – Dictionary with named variables from either the core analysis or the rest of the prediction pipeline.

-
-
Return type
-

Dict[str, object]

-
-
-
- -
- -
-
-class analysis.ICP(fixed_significance, positive_domain, confidence_normalizer)[source]
-

Confidence estimation block, uses inductive conformal predictors (ICPs) for model agnosticity

-
-
-analyze(info, **kwargs)[source]
-

This method should be called once during the analysis phase, or not called at all. -It computes any information that the block may either output to the model analysis object, -or use at inference time when .explain() is called (in this case, make sure all needed -objects are added to the runtime analyzer so that .explain() can access them).

-
-
Parameters
-

info (Dict[str, object]) – Dictionary where any new information or objects are added. The next analysis block will use the output of the previous block as a starting point.

kwargs – Dictionary with named variables from either the core analysis or the rest of the prediction pipeline.

-
-
Return type
-

Dict[str, object]

-
-
-
- -
-
-explain(row_insights, global_insights, **kwargs)[source]
-

This method should be called once during the explaining phase at inference time, or not called at all. -Additional explanations can be at an instance level (row-wise) or global. -For the former, return a data frame with any new insights. For the latter, a dictionary is required.

-
-
Parameters
-
    -
  • row_insights (DataFrame) – dataframe with previously computed row-level explanations.

  • -
  • global_insights (Dict[str, object]) – dict() with any explanations that concern all predicted instances or the model itself.

  • -
-
-
Return type
-

Tuple[DataFrame, Dict[str, object]]

-
-
Returns
-

    -
  • row_insights: modified input dataframe with any new row insights added here.

  • -
  • global_insights: dict() with any explanations that concern all predicted instances or the model itself.

  • -
-

-
-
-
- -
- -
-
-analysis.explain(data, encoded_data, predictions, timeseries_settings, analysis, target_name, target_dtype, positive_domain, fixed_confidence, anomaly_detection, anomaly_error_rate, anomaly_cooldown, explainer_blocks=[], ts_analysis={})[source]
-

This procedure runs at the end of every normal .predict() call. Its goal is to generate prediction insights, -potentially using information generated at the model analysis stage (e.g. confidence estimation).

-

As in analysis(), any user-specified analysis blocks (see class BaseAnalysisBlock) are also called here.

-
-
Returns
-

-
-
-

row_insights: a DataFrame containing predictions and all generated insights at a row-level.

-
- -
-
-analysis.model_analyzer(predictor, data, train_data, stats_info, target, ts_cfg, dtype_dict, accuracy_functions, analysis_blocks=[])[source]
-

Analyses the model on a validation subset to evaluate accuracy, estimate feature importance, and generate a calibration model for estimating confidence in future predictions.

-

Additionally, any user-specified analysis blocks (see class BaseAnalysisBlock) are also called here.

-
-
Return type
-

Tuple[ModelAnalysis, Dict[str, object]]

-
-
Returns
-

-
-
-

runtime_analyzer: This dictionary object gets populated in a sequential fashion with data generated from -any .analyze() block call. This dictionary object is stored in the predictor itself, and used when -calling the .explain() method of all analysis blocks when generating predictions.

-

model_analysis: ModelAnalysis object that contains core analysis metrics, not necessarily needed when predicting.

-
- -
- - -
- -
- -
-
- -
- -
- - - - - - - - - - - \ No newline at end of file diff --git a/docs/api.html b/docs/api.html deleted file mode 100644 index 911fa0ad0..000000000 --- a/docs/api.html +++ /dev/null @@ -1,270 +0,0 @@ - - - - - - - - - - API — lightwood 1.6.1 documentation - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - -
API

-

The API module is how Lightwood interfaces with the user.

- -
- - -
- -
- -
-
- -
- -
- - - - - - - - - - - \ No newline at end of file diff --git a/docs/api/dtype.html b/docs/api/dtype.html deleted file mode 100644 index 85ff8e886..000000000 --- a/docs/api/dtype.html +++ /dev/null @@ -1,281 +0,0 @@ - - - - - - - - - - Data Types (dtypes) — lightwood 1.6.1 documentation - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - -
Data Types (dtypes)

-

Lightwood supports several data types used in standard machine learning pipelines. The dtype class is used to label columns of information as the right input format. The type inference procedure affects what feature engineering methodology is used on a labeled column.

-

Currently, the supported way to introduce new data types is to include a custom tag in this file and to import a custom cleaning approach. Users may inherit the basic functionality of the cleaner and include their own flag specific to their data type. For steps on how to do this, please see the tutorials.

-
-
-class api.dtype.dtype[source]
-

Definitions of all data types currently supported. Dtypes currently supported include:

-
    -
  • Numerical: Data that should be represented in the form of a number. Currently integer, float, and quantity are supported.

  • -
  • Categorical: Data that represents a class or label and is discrete. Currently binary, categorical, and tags are supported.

  • -
  • Date/Time: Time-series data that is temporal/sequential. Currently date, and datetime are supported.

  • -
  • Text: Data that can be considered as language information. Currently short_text, and rich_text are supported. Short text has a small vocabulary (~ 100 words) and is generally a limited number of characters. Rich text is anything with greater complexity.

  • -
  • Complex: Data types that require custom techniques. Currently audio, video and image are available, but highly experimental.

  • -
  • Array: Data in the form of a sequence where order must be preserved. Currently array is the supported type.

  • -
  • Miscellaneous: Miscellaneous data descriptors include empty, an explicitly unknown value versus invalid, a data type not currently supported.

  • -
-

Custom data types may be implemented here as a flag for subsequent treatment and processing. You are welcome to include your own definitions, so long as they do not override the existing type names (alternatively, if you do, please edit subsequent parts of the preprocessing pipeline to correctly indicate how you want to deal with these data types).

-
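As a small, hedged example of how these flags are typically consumed downstream, a per-column dtype mapping might look as follows; the import path mirrors the class path documented above, and the column names are made up:

from lightwood.api.dtype import dtype

# dtype members act as plain flags that cleaning and encoding steps dispatch on
dtype_dict = {
    'age': dtype.integer,
    'income': dtype.float,
    'churned': dtype.binary,
    'signup_date': dtype.datetime,
    'review': dtype.rich_text,
}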
- -
- - -
- -
- -
-
- -
- -
- - - - - - - - - - - \ No newline at end of file diff --git a/docs/api/encode.html b/docs/api/encode.html deleted file mode 100644 index e21e0d6b2..000000000 --- a/docs/api/encode.html +++ /dev/null @@ -1,260 +0,0 @@ - - - - - - - - - - Encode your data — lightwood 1.6.1 documentation - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - -
Encode your data

-
- - -
- -
- -
-
- -
- -
- - - - - - - - - - - \ No newline at end of file diff --git a/docs/api/high_level.html b/docs/api/high_level.html deleted file mode 100644 index 42ef5251a..000000000 --- a/docs/api/high_level.html +++ /dev/null @@ -1,415 +0,0 @@ - - - - - - - - - - JSON-AI Config — lightwood 1.6.1 documentation - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - -
JSON-AI Config

-
-
-api.high_level.analyze_dataset(df)[source]
-

You can use this to understand and visualize the data, it’s not a part of the pipeline one would use for creating and training predictive models.

-
-
Parameters
-

df (DataFrame) – The raw data

-
-
Return type
-

DataAnalysis

-
-
Returns
-

An object containing insights about the data (specifically the type information and statistical analysis)

-
-
-
- -
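A minimal usage sketch, assuming the function is importable from lightwood.api.high_level and that 'my_data.csv' stands in for your own dataset:

import pandas as pd
from lightwood.api.high_level import analyze_dataset

df = pd.read_csv('my_data.csv')                    # hypothetical file
analysis = analyze_dataset(df)
print(analysis.type_information.dtypes)            # inferred dtype per column
print(analysis.statistical_analysis.nr_rows)       # dataset size, among other stats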
-
-api.high_level.code_from_json_ai(json_ai)[source]
-

Autogenerates custom code based on the details you specified inside your JsonAI.

-
-
Parameters
-

json_ai (JsonAI) – A JsonAI object

-
-
Return type
-

str

-
-
Returns
-

Code (text) generate based on the JsonAI you created

-
-
-
- -
-
-api.high_level.code_from_problem(df, problem_definition)[source]
-
-
Parameters
-
    -
  • df (DataFrame) – The raw data

  • -
  • problem_definition (Union[ProblemDefinition, dict]) – The manual specifications for your predictive problem

  • -
-
-
Return type
-

str

-
-
Returns
-

The text code generated based on your data and problem specifications

-
-
-
- -
-
-api.high_level.json_ai_from_problem(df, problem_definition)[source]
-

Creates a JsonAI from your raw data and problem definition. Usually you would use this when you want to subsequently edit the JsonAI, the easiest way to do this is to unload it to a dictionary via to_dict, modify it, and then create a new object from it using lightwood.JsonAI.from_dict. It’s usually better to generate the JsonAI using this function rather than writing it from scratch.

-
-
Parameters
-
    -
  • df (DataFrame) – The raw data

  • -
  • problem_definition (Union[ProblemDefinition, dict]) – The manual specifications for your predictive problem

  • -
-
-
Return type
-

JsonAI

-
-
Returns
-

A JsonAI object generated based on your data and problem specifications

-
-
-
- -
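The edit-then-rebuild loop described here might look roughly like the sketch below. Import paths and the exact keys inside the generated dictionary are assumptions to verify by printing the config yourself:

import pandas as pd
from lightwood import JsonAI
from lightwood.api.high_level import (
    json_ai_from_problem, code_from_json_ai, predictor_from_code)

df = pd.read_csv('my_data.csv')                      # hypothetical file
json_ai = json_ai_from_problem(df, {'target': 'income'})

cfg = json_ai.to_dict()
# ... inspect and tweak cfg here (encoders, splitter, mixers, etc.) ...
json_ai = JsonAI.from_dict(cfg)

code = code_from_json_ai(json_ai)                    # generate the predictor code
predictor = predictor_from_code(code)                # compile it into a Predictor
predictor.learn(df)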
-
-api.high_level.predictor_from_code(code)[source]
-
-
Parameters
-

code (str) – The Predictor’s code in text form

-
-
Return type
-

PredictorInterface

-
-
Returns
-

A lightwood Predictor object

-
-
-
- -
-
-api.high_level.predictor_from_json_ai(json_ai)[source]
-

Creates a ready-to-train Predictor object based on the details you specified inside your JsonAI.

-
-
Parameters
-

json_ai (JsonAI) – A JsonAI object

-
-
Return type
-

PredictorInterface

-
-
Returns
-

A lightwood Predictor object

-
-
-
- -
-
-api.high_level.predictor_from_problem(df, problem_definition)[source]
-

Creates a ready-to-train Predictor object from some raw data and a ProblemDefinition. Do not use this if you want to edit the JsonAI first. Usually you’d want to next train this predictor by calling the learn method on the same dataframe used to create it.

-
-
Parameters
-
    -
  • df (DataFrame) – The raw data

  • -
  • problem_definition (Union[ProblemDefinition, dict]) – The manual specifications for your predictive problem

  • -
-
-
Return type
-

PredictorInterface

-
-
Returns
-

A lightwood Predictor object

-
-
-
- -
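A short end-to-end sketch, assuming the import path below and a df DataFrame with a hypothetical 'churned' column as the target:

from lightwood.api.high_level import predictor_from_problem

predictor = predictor_from_problem(df, {'target': 'churned'})
predictor.learn(df)                      # preprocess -> featurize -> fit, end to end
predictions = predictor.predict(df)
predictor.save('predictor.pickle')       # hypothetical path, reusable via predictor_from_state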
-
-api.high_level.predictor_from_state(state_file, code=None)[source]
-
-
Parameters
-
    -
  • state_file (str) – The file containing the pickle resulting from calling save on a Predictor object

  • -
  • code (Optional[str]) – The Predictor’s code in text form

  • -
-
-
Return type
-

PredictorInterface

-
-
Returns
-

A lightwood Predictor object

-
-
-
- -
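Loading the saved state back might look like this; 'predictor.pickle' is the hypothetical file written by save() in the previous example, and code is left as its default here (an assumption worth verifying for your version of the saved state):

from lightwood.api.high_level import predictor_from_state

predictor = predictor_from_state('predictor.pickle')
new_predictions = predictor.predict(new_df)          # new_df: unseen data with the same columns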
- - -
- -
- -
-
- -
- -
- - - - - - - - - - - \ No newline at end of file diff --git a/docs/api/json_ai.html b/docs/api/json_ai.html deleted file mode 100644 index 53c6de222..000000000 --- a/docs/api/json_ai.html +++ /dev/null @@ -1,340 +0,0 @@ - - - - - - - - - - JSON-AI Config — lightwood 1.6.1 documentation - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - -
JSON-AI Config

-
-
-api.json_ai.code_from_json_ai(json_ai)[source]
-

Generates a custom PredictorInterface given the specifications from JsonAI object.

-
-
Parameters
-

json_ai (JsonAI) – JsonAI object with fully specified parameters

-
-
Return type
-

str

-
-
Returns
-

Automated syntax of the PredictorInterface object.

-
-
-
- -
-
-api.json_ai.generate_json_ai(type_information, statistical_analysis, problem_definition)[source]
-

Given TypeInformation, StatisticalAnalysis, and the ProblemDefinition, generate a JSON config file with the necessary elements of the ML pipeline populated.

-
-
Parameters
-
    -
  • TypeInformation – Specifies what data types each column within the dataset are

  • -
  • statistical_analysis (StatisticalAnalysis) –

  • -
  • problem_definition (ProblemDefinition) – Specifies details of the model training/building procedure, as defined by ProblemDefinition

  • -
-
-
Return type
-

JsonAI

-
-
Returns
-

JSON-AI object with fully populated details of the ML pipeline

-
-
-
- -
-
-api.json_ai.lookup_encoder(col_dtype, col_name, is_target, problem_defintion, is_target_predicting_encoder, statistical_analysis)[source]
-

Assign a default encoder for a given column based on its data type, and whether it is a target. Encoders intake raw (but cleaned) data and return a feature representation. This function assigns, per data type, what the featurizer should be. This function runs on each column within the dataset available for model building to assign how it should be featurized.

-

Users may override to create a custom encoder to enable their own featurization process. However, in order to generate template JSON-AI, this code runs automatically. Users may edit the generated syntax and use custom approaches while model building.

-

For each encoder, “args” may be passed. These args depend on what the encoder requires during its preparation call.

-
-
Parameters
-
    -
  • col_dtype (str) – A data-type of a column specified

  • -
  • col_name (str) – The name of the column

  • -
  • is_target (bool) – Whether the column is the target for prediction. If true, only certain possible feature representations are allowed, particularly for complex data types.

  • -
  • problem_definition – The ProblemDefinition criteria; this populates specifics on how models and encoders may be trained.

  • -
  • is_target_predicting_encoder (bool) –

  • -
-
-
-
- -
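Conceptually the function behaves like a dispatch table from dtype flags to encoder modules. The sketch below is illustrative only: the mapping and encoder names are hypothetical placeholders, not Lightwood's actual defaults, and the real function also threads in the problem definition and statistical analysis.

from lightwood.api.dtype import dtype

TOY_DEFAULTS = {                                # hypothetical mapping, for illustration only
    dtype.integer: 'NumericEncoder',
    dtype.float: 'NumericEncoder',
    dtype.binary: 'BinaryEncoder',
    dtype.categorical: 'OneHotEncoder',
    dtype.rich_text: 'PretrainedLangEncoder',
}

def toy_lookup_encoder(col_dtype, is_target):
    module = TOY_DEFAULTS.get(col_dtype, 'CategoricalAutoEncoder')
    # targets get stricter treatment in the real function; here we just tag them
    return {'module': module, 'args': {'is_target': is_target}}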
-
-api.json_ai.validate_json_ai(json_ai)[source]
-

Checks the validity of a JsonAI object

-
-
Parameters
-

json_ai (JsonAI) – A JsonAI object

-
-
Return type
-

bool

-
-
Returns
-

Whether the JsonAI is valid, i.e. doesn’t contain prohibited values, unknown values and can be turned into code.

-
-
-
- -
- - -
- -
- -
-
- -
- -
- - - - - - - - - - - \ No newline at end of file diff --git a/docs/api/predictor.html b/docs/api/predictor.html deleted file mode 100644 index 0acb78b7b..000000000 --- a/docs/api/predictor.html +++ /dev/null @@ -1,489 +0,0 @@ - - - - - - - - - - Predictor Interface — lightwood 1.6.1 documentation - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - -
Predictor Interface

-

The PredictorInterface creates the skeletal structure around basic functionality of Lightwood.

-
-
-class api.predictor.PredictorInterface[source]
-

Abstraction of a Lightwood predictor. The PredictorInterface encompasses how Lightwood interacts with the full ML pipeline.

-

The PredictorInterface class must have several expected functions:

-
    -
  • analyze_data: Perform a statistical analysis on the unprocessed data; this helps inform downstream encoders and mixers on how to treat the data types.

  • -
  • preprocess: Apply cleaning functions to each of the columns within the dataset to prepare them for featurization

  • -
  • split: Split the input dataset into a train/dev/test set according to your splitter function

  • -
  • prepare: Create and, if necessary, train your encoders to create feature representations from each column of your data.

  • -
  • featurize: For input, pre-processed data, create feature vectors

  • -
  • fit: Train your mixer models to yield predictions from featurized data

  • -
  • analyze_ensemble: Evaluate the quality of fit for your mixer models

  • -
  • adjust: Incorporate new data to update pre-existing model(s).

  • -
-

For simplification, we offer an end-to-end approach that allows you to input raw data and follow every step of the process until you reach a trained predictor with the learn function:

-
-
    -
  • learn: An end-to-end technique specifying how to pre-process, featurize, and train the model(s) of interest. The expected input is raw, unprocessed data. No explicit output is provided, but the Predictor object will “host” the trained model afterwards.

  • -
-
-

You can also use the predictor to now estimate new data:

-
    -
  • predict: Deploys the chosen best model, and evaluates the given data to provide target estimates.

  • -
  • save: Saves the Predictor object for further use.

  • -
-

The PredictorInterface is created via J{ai}son’s custom code creation. A problem inherits from this class with pre-populated routines to fill out expected results, given the nature of each problem type.

-
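Put together, the step-by-step path that learn bundles for you can also be driven manually, roughly as sketched here. This assumes a predictor built with predictor_from_problem, a df DataFrame, and split keys named 'train'/'dev'/'test' as described in the split and fit method docs below; consult the generated predictor code for the exact sequence.

predictor.analyze_data(df)
cleaned = predictor.preprocess(df)
splits = predictor.split(cleaned)            # assumed keys: 'train', 'dev', 'test'
predictor.prepare(splits)
encoded = predictor.featurize(splits)
predictor.fit(encoded)
predictor.analyze_ensemble(encoded)
predictions = predictor.predict(splits['test'])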
-
-adjust(new_data)[source]
-

Adjusts a previously trained model on new data. Adopts the same process as learn but with the exception that the adjust function expects the best model to have been already trained.

-
-

Warning

-

This is experimental and subject to change.

-
-
-
Parameters
-

new_data (Dict[str, DataFrame]) – New data used to adjust a previously trained model. Keys must reference “old” and “new” referencing to the old and new datasets. In some situations, the old data is still required to train a model (i.e. Regression) to ensure the new data doesn’t entirely override it.

-
-
Return type
-

None

-
-
Returns
-

Nothing; adjusts best-fit model

-
-
-
- -
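Given the "old"/"new" key contract described above, a hedged usage sketch (recent_df is a hypothetical batch of newly arrived rows):

# Experimental, per the warning above: keep the original data around for tasks
# (e.g. regression) where the new batch alone should not entirely override the model.
predictor.adjust({'old': df, 'new': recent_df})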
-
-analyze_data(data)[source]
-

Performs a statistical analysis on the data to identify distributions, imbalanced classes, and other nuances within the data.

-
-
Parameters
-

data (DataFrame) – Data used in training the model(s).

-
-
Return type
-

None

-
-
-
- -
-
-analyze_ensemble(enc_data)[source]
-

Evaluate the quality of mixers within an ensemble of models.

-
-
Parameters
-

enc_data (Dict[str, DataFrame]) – Pre-processed and featurized data, split into the relevant train/test splits.

-
-
Return type
-

None

-
-
-
- -
-
-featurize(split_data)[source]
-

Provides an encoded representation for each dataset in split_data. Requires self.encoders to be prepared.

-
-
Parameters
-

split_data (Dict[str, DataFrame]) – Pre-processed data from the dataset, split into train/test (or any other keys relevant)

-
-
Returns
-

For each dataset provided in split_data, the encoded representations of the data.

-
-
-
- -
-
-fit(enc_data)[source]
-

Fits “mixer” models to train predictors on the featurized data. Instantiates a set of trained mixers and an ensemble of them.

-
-
Parameters
-

enc_data (Dict[str, DataFrame]) – Pre-processed and featurized data, split into the relevant train/test splits. Keys expected are “train”, “dev”, and “test”

-
-
Return type
-

None

-
-
-
- -
-
-learn(data)[source]
-

Trains the attribute model starting from raw data. Raw data is pre-processed and cleaned accordingly. As data is assigned a particular type (ex: numerical, categorical, etc.), the respective feature encoder will convert it into a representation useable for training ML models. Of all ML models requested, these models are compiled and fit on the training data.

-

This step amalgamates preprocess -> featurize -> fit with the necessary splitting + analyze_data steps that occur along the way.

-
-
Parameters
-

data (DataFrame) – (Unprocessed) Data used in training the model(s).

-
-
Return type
-

None

-
-
Returns
-

Nothing; instantiates with best fit model from ensemble.

-
-
-
- -
-
-predict(data, args={})[source]
-

Intakes raw data to provide predicted values for your trained model.

-
-
Parameters
-
    -
  • data (DataFrame) – Data (n_samples, n_columns) that the model(s) will evaluate on and provide the target prediction.

  • -
  • args (Dict[str, object]) – parameters needed to update the predictor PredictionArguments object, which holds any parameters relevant for prediction.

  • -
-
-
Return type
-

DataFrame

-
-
Returns
-

A dataframe of predictions of the same length of input.

-
-
-
- -
-
-prepare(data)[source]
-

Prepares the encoders for each column of data.

-
-
Parameters
-

data (Dict[str, DataFrame]) – Pre-processed data that has been split into train/test. Explicitly uses “train” and/or “dev” in preparation of encoders.

-
-
Return type
-

None

-
-
Returns
-

Nothing; prepares the encoders for learned representations.

-
-
-
- -
-
-preprocess(data)[source]
-

Cleans the unprocessed dataset provided.

-
-
Parameters
-

data (DataFrame) – (Unprocessed) Data used in training the model(s).

-
-
Return type
-

DataFrame

-
-
Returns
-

The cleaned data frame

-
-
-
- -
-
-save(file_path)[source]
-

With a provided file path, saves the Predictor instance for later use.

-
-
Parameters
-

file_path (str) – Location to store your Predictor Instance.

-
-
Return type
-

None

-
-
Returns
-

Saves Predictor instance.

-
-
-
- -
-
-split(data)[source]
-

Categorizes the data into a training/testing split; if data is a classification problem, will stratify the data.

-
-
Parameters
-

data (DataFrame) – Pre-processed data, but generically any dataset to split into train/dev/test.

-
-
Return type
-

Dict[str, DataFrame]

-
-
Returns
-

Dictionary containing training/testing fraction

-
-
-
- -
- -
- - -
- -
- -
-
- -
- -
- - - - - - - - - - - \ No newline at end of file diff --git a/docs/api/types.html b/docs/api/types.html deleted file mode 100644 index 77f93c3ae..000000000 --- a/docs/api/types.html +++ /dev/null @@ -1,750 +0,0 @@ - - - - - - - - - - Lightwood API Types — lightwood 1.6.1 documentation - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - -
Lightwood API Types

-

Lightwood consists of several high level abstractions to enable the data science/machine learning (DS/ML) pipeline in a step-by-step procedure.

-
-
-class api.types.Module[source]
-

Modules are the blocks of code that end up being called from the JSON AI, representing either object instantiations or function calls.

-
-
Parameters
-
    -
  • module – Name of the module (function or class name)

  • -
  • args – Argument to pass to the function or constructor

  • -
-
-
-
- -
-
-class api.types.Feature(encoder, data_dtype=None, dependency=None)[source]
-

Within a dataframe, each column is considered its own “feature” (unless ignored etc.). Each feature is expected to have descriptions of the following:

-
-
Parameters
-
    -
  • encoder (Module) – the methodology for encoding a feature (a Lightwood Encoder)

  • -
  • data_dtype (Optional[str]) – The type of information within this column (ex.: numerical, categorical, etc.)

  • -
  • dependency (Optional[List[str]]) – Any custom attributes for this feature that may require non-standard processing. This highly depends on the encoder (ex: Pretrained text may be fine-tuned on the target; time-series requires prior time-steps).

  • -
-
-
-
-
-static from_dict(obj)[source]
-

Create Feature objects from the a dictionary representation.

-
-
Parameters
-

obj (Dict) – A dictionary representation of a column feature’s attributes. Must include keys encoder, data_dtype, and dependency.

-
-
Example
-

-
-
>>> my_dict = {"feature_A": {"encoder": MyEncoder, "data_dtype": "categorical", "dependency": None}}
->>> print(Feature.from_dict(my_dict["feature_A"]))
->>> Feature(encoder=None, data_dtype='categorical', dependency=None)
-
-
-
-
Returns
-

A Feature object with loaded information.

-
-
-
- -
-
-static from_json(data)[source]
-

Create Feature objects from JSON representation. This method calls on :ref: from_dict after loading the json config.

-
-
Parameters
-

data (str) – A JSON representation of the feature.

-
-
Returns
-

Loaded information into the Feature representation.

-
-
-
- -
-
-to_dict(encode_json=False)[source]
-

Converts a Feature to a dictionary representation.

-
-
Return type
-

Dict[str, Union[dict, list, str, int, float, bool, None]]

-
-
Returns
-

A python dictionary with strings indicating the three key elements and their respective values of the Feature class.

-
-
-
- -
-
-to_json()[source]
-

Converts a Feature into a JSON object. Calls to_dict under the hood.

-
-
Return type
-

Dict[str, Union[dict, list, str, int, float, bool, None]]

-
-
Returns
-

Json config syntax for the three key elements and their respective values of the Feature class.

-
-
-
- -
- -
-
-class api.types.Output(data_dtype, encoder=None, mixers=None, ensemble=None)[source]
-

A representation for the output feature. This is specifically used on the target column of your dataset. Four attributes are expected as seen below.

-

Note, currently supervised tasks are supported, hence categorical, numerical, and time-series are the expected outputs types. Complex features such as text generation are not currently available by default.

-
-
Parameters
-
    -
  • data_dtype (str) – The type of information within the target column (ex.: numerical, categorical, etc.).

  • -
  • encoder (Optional[str]) – the methodology for encoding the target feature (a Lightwood Encoder). There can only be one encoder for the output target.

  • -
  • mixers (Optional[List[str]]) – The list of ML algorithms that are trained for the target distribution.

  • -
  • ensemble (Optional[str]) – For a panel of ML algorithms, the approach of selecting the best mixer, and the metrics used in that evaluation.

  • -
-
-
-
- -
-
-class api.types.TypeInformation[source]
-

For a dataset, provides information on columns types, how they’re used, and any other potential identifiers.

-

TypeInformation is generated within data.infer_types, where small samples of each column are evaluated in a custom framework to understand what kind of data type each column holds. The user may override data types, but it is recommended to do so within a JSON-AI config file.

-
-
Parameters
-
    -
  • dtypes – For each column’s name, the associated data type inferred.

  • -
  • additional_info – Any possible sub-categories or additional descriptive information.

  • -
  • identifiers – Columns within the dataset highly suspected of being identifiers or IDs. These do not contain informatic value, therefore will be ignored in subsequent training/analysis procedures unless manually indicated.

  • -
-
-
-
- -
-
-class api.types.StatisticalAnalysis(nr_rows, df_target_stddev, train_observed_classes, target_class_distribution, histograms, buckets, missing, distinct, bias, avg_words_per_sentence, positive_domain)[source]
-

The Statistical Analysis data class allows users to consider key descriptors of their data using simple techniques such as histograms, mean and standard deviation, word count, missing values, and any detected bias in the information.

-
-
Parameters
-
    -
  • nr_rows (int) – Number of rows (samples) in the dataset

  • -
  • df_target_stddev (Optional[float]) – The standard deviation of the target of the dataset

  • -
  • train_observed_classes (object) –

  • -
  • target_class_distribution (object) –

  • -
  • histograms (object) –

  • -
  • buckets (object) –

  • -
  • missing (object) –

  • -
  • distinct (object) –

  • -
  • bias (object) –

  • -
  • avg_words_per_sentence (object) –

  • -
  • positive_domain (bool) –

  • -
-
-
-
- -
-
-class api.types.DataAnalysis(statistical_analysis, type_information)[source]
-

Data Analysis wraps :class: .StatisticalAnalysis and :class: .TypeInformation together. Further details can be seen in their respective documentation references.

-
- -
-
-class api.types.TimeseriesSettings(is_timeseries, order_by=None, window=None, group_by=None, use_previous_target=True, nr_predictions=None, historical_columns=None, target_type='', allow_incomplete_history=False)[source]
-

For time-series specific problems, more specific treatment of the data is necessary. The following attributes enable time-series tasks to be carried out properly.

-
-
Parameters
-
    -
  • is_timeseries (bool) – Whether the input data should be treated as time series; if true, this flag is checked in subsequent internal steps to ensure processing is appropriate for time-series data.

  • -
  • order_by (Optional[List[str]]) – A list of columns by which the data should be ordered.

  • -
  • group_by (Optional[List[str]]) – Optional list of columns by which the data should be grouped. Each different combination of values for these columns will yield a different series.

  • -
  • window (Optional[int]) – The temporal horizon (number of rows) that a model intakes to “look back” into when making a prediction, after the rows are ordered by order_by columns and split into groups if applicable.

  • -
  • nr_predictions (Optional[int]) – The number of points in the future that predictions should be made for, defaults to 1. Once trained, the model will be able to predict up to this many points into the future.

  • -
  • historical_columns (Optional[List[str]]) – The temporal dynamics of these columns will be used as additional context to train the time series predictor. Note that a non-historical column shall still be used to forecast, but without considering their change through time.

  • -
  • target_type (str) – Automatically inferred dtype of the target (e.g. dtype.integer, dtype.float).

  • -
  • use_previous_target (bool) – Use the previous values of the target column to generate predictions. Defaults to True.

  • -
-
-
-
-
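A small sketch of building these settings from a dictionary, using the from_dict constructor documented just below; the column names are hypothetical and, per that constructor, only order_by and window are mandatory:

from lightwood.api.types import TimeseriesSettings

ts_settings = TimeseriesSettings.from_dict({
    'order_by': ['timestamp'],     # sort each series by this column
    'window': 24,                  # look back 24 rows per prediction
    'group_by': ['store_id'],      # one series per store
    'nr_predictions': 6,           # forecast 6 steps ahead
})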
-static from_dict(obj)[source]
-

Creates a TimeseriesSettings object from python dictionary specifications.

-
-
Param
-

obj: A python dictionary with the necessary representation for time-series. The only mandatory columns are order_by and window.

-
-
Returns
-

A populated TimeseriesSettings object.

-
-
-
- -
-
-static from_json(data)[source]
-

Creates a TimeseriesSettings object from JSON specifications via python dictionary.

-
-
Param
-

data: JSON-config file with necessary Time-series specifications

-
-
Returns
-

A populated TimeseriesSettings object.

-
-
-
- -
-
-to_dict(encode_json=False)[source]
-

Creates a dictionary from TimeseriesSettings object

-
-
Return type
-

Dict[str, Union[dict, list, str, int, float, bool, None]]

-
-
Returns
-

A python dictionary containing the TimeSeriesSettings specifications.

-
-
-
- -
-
-to_json()[source]
-

Creates JSON config from TimeseriesSettings object.

Return type

Dict[str, Union[dict, list, str, int, float, bool, None]]

Returns

The JSON config syntax containing the TimeSeriesSettings specifications.

-
- -
- -
-
-class api.types.ProblemDefinition(target, pct_invalid, unbias_target, seconds_per_mixer, seconds_per_encoder, time_aim, target_weights, positive_domain, timeseries_settings, anomaly_detection, ignore_features, fit_on_all, strict_mode, seed_nr)[source]
-

The ProblemDefinition object indicates details on how the models that predict the target are prepared. The only required specification from a user is the target, which indicates the column within the input data that the user is trying to predict. Within the ProblemDefinition, the user can specify aspects about how long the feature-engineering preparation may take, and nuances about training the models.

-
-
Parameters
-
    -
  • target (str) – The name of the target column; this is the column that will be used as the goal of the prediction.

  • -
  • pct_invalid (float) – Number of data points maximally tolerated as invalid/missing/unknown. If the data cleaning process exceeds this number, no subsequent steps will be taken.

  • -
  • unbias_target (bool) – all classes are automatically weighted inverse to how often they occur

  • -
  • seconds_per_mixer (Optional[int]) – Number of seconds maximum to spend PER mixer trained in the list of possible mixers.

  • -
  • seconds_per_encoder (Optional[int]) – Number of seconds maximum to spend when training an encoder that requires data to learn a representation.

  • -
  • time_aim (Optional[int]) – Time budget (in seconds) to train all needed components for the predictive tasks, including encoders and models.

  • -
  • target_weights (Optional[List[float]]) – indicates to the accuracy functions how much to weight every target class.

  • -
  • positive_domain (bool) – For numerical tasks, force predictor output to be positive (integer or float).

  • -
  • timeseries_settings (TimeseriesSettings) – TimeseriesSettings object for time-series tasks, refer to its documentation for available settings.

  • -
  • anomaly_detection (bool) – Whether to conduct unsupervised anomaly detection; currently supported only for time series.

  • -
  • ignore_features (List[str]) – The names of the columns the user wishes to ignore in the ML pipeline. Any column name found in this list will be automatically removed from subsequent steps in the ML pipeline.

  • -
  • fit_on_all (bool) – Whether to fit the model on the held-out validation data. Validation data is strictly used to evaluate how well a model is doing and is NEVER trained. However, in cases where users anticipate new incoming data over time, the user may train the model further using the entire dataset.

  • -
  • strict_mode (bool) – crash if an unstable block (mixer, encoder, etc.) fails to run.

  • -
  • seed_nr (int) – custom seed to use when generating a predictor from this problem definition.

  • -
-
-
-
-
-static from_dict(obj)[source]
-

Creates a ProblemDefinition object from a python dictionary with necessary specifications.

-
-
Parameters
-

obj (Dict) – A python dictionary with the necessary features for the ProblemDefinition class.

-
-
-

Only requires target to be specified.

-
-
Returns
-

A populated ProblemDefinition object.

-
-
-
- -
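For instance, a hedged minimal setup (column names hypothetical; only target is required, the rest are optional knobs listed above):

from lightwood.api.types import ProblemDefinition
from lightwood.api.high_level import predictor_from_problem

pdef = ProblemDefinition.from_dict({
    'target': 'income',
    'time_aim': 300,                  # overall training budget, in seconds
    'ignore_features': ['row_id'],
    'unbias_target': True,
})
predictor = predictor_from_problem(df, pdef)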
-
-static from_json(data)[source]
-

Creates a ProblemDefinition Object from JSON config file.

-
-
Parameters
-

data (str) –

-
-
Returns
-

A populated ProblemDefinition object.

-
-
-
- -
-
-to_dict(encode_json=False)[source]
-

Creates a python dictionary from the ProblemDefinition object

-
-
Return type
-

Dict[str, Union[dict, list, str, int, float, bool, None]]

-
-
Returns
-

A python dictionary

-
-
-
- -
-
-to_json()[source]
-

Creates a JSON config from the ProblemDefinition object

-
-
Return type
-

Dict[str, Union[dict, list, str, int, float, bool, None]]

-
-
Returns
-

TODO

-
-
-
- -
- -
-
-class api.types.JsonAI(features, outputs, problem_definition, identifiers, cleaner=None, splitter=None, analyzer=None, explainer=None, analysis_blocks=None, timeseries_transformer=None, timeseries_analyzer=None, accuracy_functions=None)[source]
-

The JsonAI Class allows users to construct flexible JSON config to specify their ML pipeline. JSON-AI follows a recipe of how to pre-process data, construct features, and train on the target column. To do so, the following specifications are required internally.

-
-
Parameters
-
    -
  • features (Dict[str, Feature]) – The corresponding ``Feature`` object for each of the column names of the dataset

  • -
  • outputs (Dict[str, Output]) – The column name of the target and its Output object

  • -
  • problem_definition (ProblemDefinition) – The ProblemDefinition criteria.

  • -
  • identifiers (Dict[str, str]) – A dictionary of column names and respective data types that are likely identifiers/IDs within the data. Through the default cleaning process, these are ignored.

  • -
  • cleaner (Optional[Module]) – The Cleaner object represents the pre-processing step on a dataframe. The user can specify custom subroutines, if they choose, on how to handle preprocessing. Alternatively, “None” suggests Lightwood’s default approach in data.cleaner.

  • -
  • splitter (Optional[Module]) – The Splitter object is the method in which the input data is split into training/validation/testing data.

  • -
  • analyzer (Optional[Module]) – The Analyzer object is used to evaluate how well a model performed on the predictive task.

  • -
  • explainer (Optional[Module]) – The Explainer object deploys explainability tools of interest on a model to indicate how well a model generalizes its predictions.

  • -
  • analysis_blocks (Optional[List[Module]]) – The blocks that get used in both analysis and inference inside the analyzer and explainer blocks.

  • -
  • timeseries_transformer (Optional[Module]) – Procedure used to transform any timeseries task dataframe into the format that lightwood expects for the rest of the pipeline.

  • -
  • timeseries_analyzer (Optional[Module]) – Procedure that extracts key insights from any timeseries in the data (e.g. measurement frequency, target distribution, etc).

  • -
  • accuracy_functions (Optional[List[str]]) – A list of performance metrics used to evaluate the best mixers.

  • -
-
-
-
-
-static from_dict(obj)[source]
-

Creates a JSON-AI object from dictionary specifications of the JSON-config.

-
- -
-
-static from_json(data)[source]
-

Creates a JSON-AI object from JSON config

-
- -
-
-to_dict(encode_json=False)[source]
-

Creates a python dictionary with necessary modules within the ML pipeline specified from the JSON-AI object.

-
-
Return type
-

Dict[str, Union[dict, list, str, int, float, bool, None]]

-
-
Returns
-

A python dictionary that has the necessary components of the ML pipeline for a given dataset.

-
-
-
- -
-
-to_json()[source]
-

Creates JSON config to represent the necessary modules within the ML pipeline specified from the JSON-AI object.

-
-
Return type
-

Dict[str, Union[dict, list, str, int, float, bool, None]]

-
-
Returns
-

A JSON config that has the necessary components of the ML pipeline for a given dataset.

-
-
-
- -
- -
-
-class api.types.ModelAnalysis(accuracies, accuracy_histogram, accuracy_samples, train_sample_size, test_sample_size, column_importances, confusion_matrix, histograms, dtypes)[source]
-

The ModelAnalysis class stores useful information to describe a model and understand its predictive performance on a validation dataset. -For each trained ML algorithm, we store:

-
-
Parameters
-
    -
  • accuracies (Dict[str, float]) – Dictionary with obtained values for each accuracy function (specified in JsonAI)

  • -
  • accuracy_histogram (Dict[str, list]) – Dictionary with histograms of reported accuracy by target value.

  • -
  • accuracy_samples (Dict[str, list]) – Dictionary with sampled pairs of observed target values and respective predictions.

  • -
  • train_sample_size (int) – Size of the training set (data that parameters are updated on)

  • -
  • test_sample_size (int) – Size of the testing set (explicitly held out)

  • -
  • column_importances (Dict[str, float]) – Dictionary with the importance of each column for the model, as estimated by an approach that closely follows a leave-one-covariate-out strategy.

  • -
  • confusion_matrix (object) – A confusion matrix for the validation dataset.

  • -
  • histograms (object) – Histogram for each dataset feature.

  • -
  • dtypes (object) – Inferred data types for each dataset feature.

  • -
-
-
-
- -
-
-class api.types.PredictionArguments(predict_proba=False, all_mixers=False, fixed_confidence=None, anomaly_error_rate=None, anomaly_cooldown=1)[source]
-

This class contains all possible arguments that can be passed to a Lightwood predictor at inference time. -On each predict call, all arguments included in a parameter dictionary will update the respective fields -in the PredictionArguments instance that the predictor will have.

-
-
Parameters
-

  • predict_proba (bool) – triggers (where supported) predictions in raw probability output form. I.e. for classifiers, instead of returning only the predicted class, the output additionally includes the assigned probability for each class.

  • all_mixers (bool) – forces an ensemble to return predictions emitted by all its internal mixers.

  • fixed_confidence (Union[int, float, None]) – For the analyzer module, specifies a fixed alpha confidence for the model calibration so that predictions, on average, are correct alpha percent of the time.

  • anomaly_error_rate (Optional[float]) – Error rate for unsupervised anomaly detection. Bounded between 0.01 and 0.99 (respectively implies wider and tighter bounds, all other parameters being equal).

  • anomaly_cooldown (int) – Sets the minimum amount of timesteps between consecutive firings of the anomaly detector.

-
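A short sketch of how these arguments are typically supplied; from_dict is documented just below, while passing the same dictionary to the predictor's predict call (shown commented out) is an assumption based on the description above, with a hypothetical predictor and test_df:

from lightwood.api.types import PredictionArguments

# Build the arguments object explicitly...
pargs = PredictionArguments.from_dict({"predict_proba": True, "all_mixers": True})

# ...or pass the same dictionary at inference time and let the predictor
# populate its PredictionArguments instance internally:
# predictions = predictor.predict(test_df, args={"predict_proba": True, "all_mixers": True})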
-
-static from_dict(obj)[source]
-

Creates a PredictionArguments object from a python dictionary with necessary specifications.

-
-
Parameters
-

obj (Dict) – A python dictionary with the necessary features for the PredictionArguments class.

-
-
Returns
-

A populated PredictionArguments object.

-
-
-
- -
-
-to_dict(encode_json=False)[source]
-

Creates a python dictionary from the PredictionArguments object

-
-
Return type
-

Dict[str, Union[dict, list, str, int, float, bool, None]]

-
-
Returns
-

A python dictionary

-
-
-
- -
- -
- - -
- -
- -
-
- -
- -
- - - - - - - - - - - \ No newline at end of file diff --git a/docs/data.html b/docs/data.html deleted file mode 100644 index 2e10c911b..000000000 --- a/docs/data.html +++ /dev/null @@ -1,524 +0,0 @@ - - - - - - - - - - Data — lightwood 1.6.1 documentation - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - -
- - - -
- - - - - -
- -
- - - - - - - - - - - - - - - - - - - -
- - - - -
-
-
-
- - - -
-

Data

-

The focus of these modules is on storing, transforming, cleaning, splitting, merging, getting and removing data.

-
-
-class data.ConcatedEncodedDs(encoded_ds_arr)[source]
-

ConcatedEncodedDs abstracts over multiple encoded datasources (EncodedDs) as if they were a single entity.

-

Create a Lightwood datasource from a data frame and some encoders. This class inherits from torch.utils.data.Dataset.

-

Note: normal behavior is to cache encoded representations to avoid duplicated computations. If you want an option to disable this, please open an issue.

-
-
Parameters
-
    -
  • encoders – list of Lightwood encoders used to encode the data per each column.

  • -
  • data_frame – original dataframe.

  • -
  • target – name of the target column to predict.

  • -
-
-
-
-
-clear_cache()[source]
-

See lightwood.data.encoded_ds.EncodedDs.clear_cache().

-
- -
-
-property data_frame: pandas.core.frame.DataFrame
-

Property that concatenates all underlying EncodedDs’s dataframes and returns them.

-

Note: be careful not to modify a ConcatedEncodedDs; as you can see in the source, modifications will not have an effect.

-
-
Return type
-

DataFrame

-
-
Returns
-

Dataframe with all original data.

-
-
-
- -
-
-get_column_original_data(column_name)[source]
-

See lightwood.data.encoded_ds.EncodedDs.get_column_original_data().

-
-
Return type
-

Series

-
-
-
- -
-
-get_encoded_column_data(column_name)[source]
-

See lightwood.data.encoded_ds.EncodedDs.get_encoded_column_data().

-
-
Return type
-

Tensor

-
-
-
- -
- -
-
-class data.EncodedDs(encoders, data_frame, target)[source]
-

Create a Lightwood datasource from a data frame and some encoders. This class inherits from torch.utils.data.Dataset.

-

Note: normal behavior is to cache encoded representations to avoid duplicated computations. If you want an option to disable this, please open an issue.

-
-
Parameters
-
    -
  • encoders (List[BaseEncoder]) – list of Lightwood encoders used to encode the data per each column.

  • -
  • data_frame (DataFrame) – original dataframe.

  • -
  • target (str) – name of the target column to predict.

  • -
-
-
-
-
-clear_cache()[source]
-

Clears the EncodedDs cache.

-
- -
-
-get_column_original_data(column_name)[source]
-

Gets the original data for any given column of the EncodedDs.

-
-
Parameters
-

column_name (str) – name of the column.

-
-
Return type
-

Series

-
-
Returns
-

A pd.Series with the original data stored in the column_name column.

-
-
-
- -
-
-get_encoded_column_data(column_name)[source]
-

Gets the encoded data for any given column of the EncodedDs.

-
-
Parameters
-

column_name (str) – name of the column.

-
-
Return type
-

Tensor

-
-
Returns
-

A torch.Tensor with the encoded data of the column_name column.

-
-
-
- -
-
-get_encoded_data(include_target=True)[source]
-

Gets all encoded data.

-
-
Parameters
-

include_target – whether to include the target column in the output or not.

-
-
Return type
-

Tensor

-
-
Returns
-

A torch.Tensor with the encoded dataframe.

-
-
-
- -
- -
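A minimal sketch of building and querying an EncodedDs (assumes a pandas dataframe df, a list of already-prepared encoders — one per column — and a target column name, all placeholders; the import path is an assumption, the classes themselves are the ones documented above):

from lightwood.data import EncodedDs, ConcatedEncodedDs

ds = EncodedDs(encoders=encoders, data_frame=df, target="my_target")

x_col = ds.get_encoded_column_data("some_feature")   # torch.Tensor for one column
x_all = ds.get_encoded_data(include_target=False)    # torch.Tensor for the full frame
raw = ds.get_column_original_data("some_feature")    # original pd.Series

# Several EncodedDs objects can be treated as a single datasource:
combined = ConcatedEncodedDs([ds])
ds.clear_cache()  # drop cached encoded representations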
-
-data.cleaner(data, dtype_dict, pct_invalid, identifiers, target, mode, timeseries_settings, anomaly_detection, custom_cleaning_functions={})[source]
-

The cleaner is a function which takes in the raw data, plus additional information about its types and about the problem. Based on this, it generates a “clean” representation of the data, where each column has an ideal standardized type and all malformed or otherwise missing or invalid elements are turned into None.

-
-
Parameters
-
    -
  • data (DataFrame) – The raw data

  • -
  • dtype_dict (Dict[str, str]) – Type information for each column

  • -
  • pct_invalid (float) – How much of each column can be invalid

  • -
  • identifiers (Dict[str, str]) – A dict containing all identifier typed columns

  • -
  • target (str) – The target column

  • -
  • mode (str) – Can be “predict” or “train”

  • -
  • timeseries_settings (TimeseriesSettings) – Timeseries related settings, only relevant for timeseries predictors, otherwise can be the default object

  • -
  • anomaly_detection (bool) – Are we detecting anomalies with this predictor?

  • -
-
-
Return type
-

DataFrame

-
-
Returns
-

The cleaned data

-
-
-
- -
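A hedged sketch of calling the cleaner directly, using the signature documented above (the column names, dtype strings and the non-timeseries TimeseriesSettings construction are illustrative assumptions):

from lightwood.data import cleaner
from lightwood.api.types import TimeseriesSettings

clean_df = cleaner(
    data=raw_df,                                  # raw pd.DataFrame (placeholder)
    dtype_dict={"age": "integer", "label": "categorical"},
    pct_invalid=2,                                # up to 2% of a column may be invalid
    identifiers={},                               # no identifier-like columns
    target="label",
    mode="train",
    timeseries_settings=TimeseriesSettings.from_dict({"is_timeseries": False}),
    anomaly_detection=False,
)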
-
-data.splitter(data, tss, dtype_dict, seed, pct_train, pct_dev, pct_test, target)[source]
-

Splits data into training, dev and testing datasets.

-

The proportion of data for each split must be specified (JSON-AI sets defaults to 80/10/10). First, rows in the dataset are shuffled randomly. Then a simple split is done. If a target value is provided and is of data type categorical/binary, then the splits will be stratified to maintain the representative populations of each class.

-
-
Parameters
-
    -
  • data (DataFrame) – Input dataset to be split

  • -
  • tss (TimeseriesSettings) – time-series specific details for splitting

  • -
  • dtype_dict (Dict[str, str]) – Dictionary with the data type of all columns

  • -
  • seed (int) – Random state for pandas data-frame shuffling

  • -
  • pct_train (float) – training fraction of data; must be less than 1

  • -
  • pct_dev (float) – dev fraction of data; must be less than 1

  • -
  • pct_test (float) – testing fraction of data; must be less than 1

  • -
  • target (str) – Name of the target column; if specified, data will be stratified on this column

  • -
-
-
Return type
-

Dict[str, DataFrame]

-
-
Returns
-

A dictionary containing the keys train, test and dev with their respective data frames, as well as the “stratified_on” key indicating which columns the data was stratified on (None if it wasn’t stratified on anything)

-
-
-
- -
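A short sketch of the splitter call and its output (the dataframe, dtype dictionary and column names are placeholders; the 80/10/10 proportions mirror the defaults described above):

from lightwood.data import splitter
from lightwood.api.types import TimeseriesSettings

splits = splitter(
    data=clean_df,
    tss=TimeseriesSettings.from_dict({"is_timeseries": False}),
    dtype_dict={"age": "integer", "label": "categorical"},
    seed=1,
    pct_train=0.8,
    pct_dev=0.1,
    pct_test=0.1,
    target="label",
)
train_df, dev_df, test_df = splits["train"], splits["dev"], splits["test"]
print(splits["stratified_on"])  # columns used for stratification, or None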
-
-data.timeseries_analyzer(data, dtype_dict, timeseries_settings, target)[source]
-

This module analyzes (pre-processed) time series data and stores a few useful insights used in the rest of Lightwood’s pipeline.

-
-
Parameters
-
    -
  • data (DataFrame) – dataframe with time series dataset.

  • -
  • dtype_dict (Dict[str, str]) – dictionary with inferred types for every column.

  • -
  • timeseries_settings (TimeseriesSettings) – A TimeseriesSettings object. For more details, check lightwood.types.TimeseriesSettings.

  • -
  • target (str) – name of the target column.

  • -
-
-
-
-
The following things are extracted from each time series inside the dataset:
    -
  • group_combinations: all observed combinations of values for the set of group_by columns. The length of this list determines how many time series are in the data.

  • -
  • deltas: inferred sampling interval

  • -
  • ts_naive_residuals: Residuals obtained from the data by a naive forecaster that repeats the last-seen value.

  • -
  • ts_naive_mae: Mean residual value obtained from the data by a naive forecaster that repeats the last-seen value.

  • -
  • target_normalizers: objects that may normalize the data within any given time series for effective learning. See lightwood.encoder.time_series.helpers.common for available choices.

  • -
-
-
-
-
Return type
-

Dict

-
-
Returns
-

Dictionary with the aforementioned insights and the TimeseriesSettings object for future references.

-
-
-
- -
-
-data.transform_timeseries(data, dtype_dict, timeseries_settings, target, mode)[source]
-

Block that transforms the dataframe of a time series task into a convenient format for later phases of the pipeline, such as model training.

-
-
The main transformations performed by this block are:
    -
  • Type casting (e.g. to numerical for order_by columns).

  • -
  • Windowing functions for historical context based on TimeseriesSettings.window parameter.

  • -
  • Explicitly add target columns according to the TimeseriesSettings.nr_predictions parameter.

  • -
  • Flag all rows that are “predictable” based on all TimeseriesSettings.

  • -
  • Plus, handle all logic for the streaming use case (where forecasts are only emitted for the last observed data point).

  • -
-
-
-
-
Parameters
-
    -
  • data (DataFrame) – Dataframe with data to transform.

  • -
  • dtype_dict (Dict[str, str]) – Dictionary with the types of each column.

  • -
  • timeseries_settings (TimeseriesSettings) – A TimeseriesSettings object.

  • -
  • target (str) – The name of the target column to forecast.

  • -
  • mode (str) – Either “train” or “predict”, depending on what phase is calling this procedure.

  • -
-
-
Return type
-

DataFrame

-
-
Returns
-

A dataframe with all the transformations applied.

-
-
-
- -
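A hedged sketch tying the two time-series blocks above together (the TimeseriesSettings keys shown — order_by, window, group_by, nr_predictions — follow the parameters referenced on this page, but the exact dictionary and the returned key names are illustrative assumptions):

from lightwood.api.types import TimeseriesSettings
from lightwood.data import transform_timeseries, timeseries_analyzer

tss = TimeseriesSettings.from_dict({
    "order_by": ["date"],       # temporal ordering column(s)
    "window": 5,                # historical context length
    "group_by": ["store_id"],   # one series per group value
    "nr_predictions": 1,        # forecast horizon
})

ts_df = transform_timeseries(df, dtype_dict, tss, target="sales", mode="train")
insights = timeseries_analyzer(ts_df, dtype_dict, tss, target="sales")
print(insights["deltas"], insights["group_combinations"])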
- - -
- -
- -
-
- -
- -
- - - - - - - - - - - \ No newline at end of file diff --git a/docs/data/cleaner.html b/docs/data/cleaner.html deleted file mode 100644 index ace6bfabe..000000000 --- a/docs/data/cleaner.html +++ /dev/null @@ -1,287 +0,0 @@ - - - - - - - - - - Data Cleaning — lightwood 1.6.1 documentation - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - -
- - - -
- - - - - -
- -
- - - - - - - - - - - - - - - - - - - -
- - - - -
-
-
-
- - - -
-

Data Cleaning

-
-
-data.cleaner.cleaner(data, dtype_dict, pct_invalid, identifiers, target, mode, timeseries_settings, anomaly_detection, custom_cleaning_functions={})[source]
-

The cleaner is a function which takes in the raw data, plus additional information about its types and about the problem. Based on this, it generates a “clean” representation of the data, where each column has an ideal standardized type and all malformed or otherwise missing or invalid elements are turned into None.

-
-
Parameters
-
    -
  • data (DataFrame) – The raw data

  • -
  • dtype_dict (Dict[str, str]) – Type information for each column

  • -
  • pct_invalid (float) – How much of each column can be invalid

  • -
  • identifiers (Dict[str, str]) – A dict containing all identifier typed columns

  • -
  • target (str) – The target column

  • -
  • mode (str) – Can be “predict” or “train”

  • -
  • timeseries_settings (TimeseriesSettings) – Timeseries related settings, only relevant for timeseries predictors, otherwise can be the default object

  • -
  • anomaly_detection (bool) – Are we detecting anomalies with this predictor?

  • -
-
-
Return type
-

DataFrame

-
-
Returns
-

The cleaned data

-
-
-
- -
-
-data.cleaner.get_cleaning_func(data_dtype, custom_cleaning_functions)[source]
-

For the provided data type, return the appropriate cleaning function. Below are the defaults; users can either override this function or impose a custom block.

-
-
Parameters
-

data_dtype (dtype) – The data-type (inferred from a column) as prescribed from api.dtype

-
-
Return type
-

Callable

-
-
Returns
-

The appropriate function that will pre-process (clean) data of specified dtype.

-
-
-
- -
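A small sketch of fetching a default cleaning function for a given dtype (the dtype constant and the element-wise call are assumptions based on the description above):

from lightwood.api.dtype import dtype
from lightwood.data.cleaner import get_cleaning_func

clean_fn = get_cleaning_func(dtype.categorical, custom_cleaning_functions={})
print(clean_fn("  Some Label "))  # cleaned value for a single raw entry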
- - -
- -
-
- -
- -
-

-
-
- -
- -
- - - - - - - - - - - \ No newline at end of file diff --git a/docs/encoder.html b/docs/encoder.html deleted file mode 100644 index 5d68889cc..000000000 --- a/docs/encoder.html +++ /dev/null @@ -1,600 +0,0 @@ - - - - - - - - - - Encoders — lightwood 1.6.1 documentation - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - -
- - - -
- - - - - -
- -
- - - - - - - - - - - - - - - - - - - -
- - - - -
-
-
-
- - - -
-

Encoders

-

Used for encoding data into PyTorch tensors and decoding it back from PyTorch tensors.

-
-
-class encoder.ArrayEncoder(stop_after, window=None, is_target=False, original_type=None)[source]
-

Fits a normalizer for array data. To encode, ArrayEncoder returns a normalized window of previous data. It can be used for generic arrays, as well as for handling historical target values in time series tasks.

-

Currently supported normalizing strategies are minmax for numerical arrays, and a simple one-hot for categorical arrays. See lightwood.encoder.helpers for more details on each approach.

-
-
Parameters
-
    -
  • stop_after (int) – time budget in seconds.

  • -
  • window (Optional[int]) – expected length of array data.

  • -
  • original_dtype – element-wise data type

  • -
-
-
-
- -
-
-class encoder.BaseEncoder(is_target=False)[source]
-

Base class for all encoders.

-

An encoder should return encoded representations of any columnar data. The procedure for this is defined inside the encode() method.

-

If this encoder is expected to handle an output column, then it also needs to implement the respective decode() method that handles the inverse transformation from encoded representations to the final prediction in the original column space.

-

For encoders that learn representations (as opposed to rule-based), the prepare() method will handle all learning logic.

-

The to() method is used to move PyTorch-based encoders to and from a GPU.

-
-
Parameters
-
    -
  • is_target – Whether the data to encode is the target, as per the problem definition.

  • -
  • is_timeseries_encoder – Whether encoder represents sequential/time-series data. Lightwood must provide specific treatment for this kind of encoder

  • -
  • is_trainable_encoder – Whether the encoder must return learned representations. Lightwood checks whether this flag is present in order to pass data to the feature representation via the prepare statement.

  • -
-
-
-

Class Attributes:
  • is_prepared: Internal flag to signal that the prepare() method has been successfully executed.
  • is_nn_encoder: Whether the encoder is neural network-based.
  • dependencies: list of additional columns that the encoder might need to encode.
  • output_size: length of each encoding tensor for a single data point.

-
- -
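A minimal, hypothetical rule-based encoder following this interface (the class attributes and method names mirror the description above; the import path and the exact prepare() signature for non-trainable encoders are assumptions):

import torch
from lightwood.encoder import BaseEncoder

class LogNumericEncoder(BaseEncoder):
    """Toy rule-based encoder: represents non-negative numbers as log1p(x)."""
    def __init__(self, is_target=False):
        super().__init__(is_target)
        self.output_size = 1

    def prepare(self, priming_data):
        # Purely rule-based: nothing to learn from the priming data.
        self.is_prepared = True

    def encode(self, column_data):
        values = [float(v) if v is not None else 0.0 for v in column_data]
        return torch.log1p(torch.tensor(values)).unsqueeze(1)

    def decode(self, encoded_values):
        return torch.expm1(encoded_values.squeeze(1)).tolist()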
-
-class encoder.BinaryEncoder(is_target=False, target_class_distribution=None)[source]
-
- -
-
-class encoder.CategoricalAutoEncoder(stop_after=3600, is_target=False, max_encoded_length=100)[source]
-
- -
-
-class encoder.DatetimeEncoder(is_target=False)[source]
-

This encoder produces an encoded representation for timestamps.

-

The approach consists of decomposing the timestamp objects into their constituent units (e.g. day-of-week, month, year, etc.), and describing each of those with a single value that represents the magnitude in a sensible cycle length.

-
-
-encode(data)[source]
-
-
Parameters
-

data – # @TODO: receive a consistent data type here; currently either list of lists or pd.Series w/lists

-
-
Returns
-

encoded data

-
-
-
- -
-
-encode_one(unix_timestamp)[source]
-

Encodes a list of unix_timestamps, or a list of tensors with unix_timestamps. Parameters: data – a list of unix_timestamps (resolution is seconds). Returns: a list of vectors.

-
- -
- -
-
-class encoder.DatetimeNormalizerEncoder(is_target=False, sinusoidal=False)[source]
-
-
-encode(data)[source]
-
-
Parameters
-

data – # @TODO: receive a consistent data type here; currently either list of lists or pd.Series w/lists

-
-
Returns
-

encoded data

-
-
-
- -
-
-encode_one(data)[source]
-

Encodes a list of unix_timestamps, or a list of tensors with unix_timestamps. Parameters: data – a list of unix_timestamps (resolution is seconds). Returns: a list of vectors.

-
- -
- -
-
-class encoder.Img2VecEncoder(stop_after=3600, is_target=False)[source]
-
-
-encode(images)[source]
-

Encode list of images

-
-
Parameters
-

images (List[str]) – list of images, each image is a path to a file or a url

-
-
Return type
-

Tensor

-
-
Returns
-

a torch.floatTensor

-
-
-
- -
- -
-
-class encoder.MultiHotEncoder(is_target=False)[source]
-
- -
-
-class encoder.NumericEncoder(data_type=None, is_target=False, positive_domain=False)[source]
-
- -
-
-class encoder.OneHotEncoder(is_target=False, target_class_distribution=None, handle_unknown='unknown_token')[source]
-
- -
-
-class encoder.PretrainedLangEncoder(stop_after, is_target=False, model_name='distilbert', custom_tokenizer=None, batch_size=10, max_position_embeddings=None, frozen=False, epochs=1, output_type=None, embed_mode=True)[source]
-
-
-encode(column_data)[source]
-

Given column data, encode the dataset (TODO: maybe batch the text up; may take too long).

Currently, returns the embedding of the pre-classifier layer.

Parameters: column_data – list[str] of text data in str form.

Returns: encoded_representation – torch.Tensor of shape N_sentences x N_embed_dim.

-
- -
-
-is_trainable_encoder: bool = True
-

Pretrained language models, with the option to train on a target encoding of choice.

Args:
  • is_target (bool) – data column is the target of ML.
  • model_name (str) – name of pre-trained model.
  • custom_tokenizer (function) – custom tokenizing function.
  • batch_size (int) – size of batch.
  • max_position_embeddings (int) – max sequence length of input text.
  • custom_train (bool) – If true, trains model on the target provided.
  • frozen (bool) – If true, freezes transformer layers during training.
  • epochs (int) – number of epochs to train model with.
  • embed_mode (bool) – If true, assumes the output of the encode() step is the CLS embedding.

-
- -
-
-prepare(train_priming_data, dev_priming_data, encoded_target_values)[source]
-

Prepare the encoder by training on the target.

-

Training data must be a dict with a “targets” key available; this is assumed automatically.

-
- -
- -
-
-class encoder.ShortTextEncoder(is_target=False, mode=None)[source]
-
-
Parameters
-
    -
  • is_target

  • -
  • mode – None, “concat” or “mean”. When None, it is set automatically based on is_target: ‘concat’ when is_target, ‘mean’ otherwise.

  • -
-
-
-
- -
-
-encoder.TextRnnEncoder
-

alias of lightwood.encoder.text.rnn.RnnEncoder

-
- -
-
-class encoder.TimeSeriesEncoder(stop_after, is_target=False, original_type=None, target=None, grouped_by=[], encoder_type='rnn')[source]
-

Time series encoder. This module can learn features for any order_by temporal column, both with and without accompanying target data.

-

The backbone of this encoder is either a recurrent neural network or a transformer; both structured in an encoder-decoder fashion.

-
-
-decode(encoded_data, steps=None)[source]
-

Decode a list of embedded multidimensional time series. Parameters: encoded_data – a list of embeddings [e1, e2, …] to be decoded into time series; steps – fixed number of timesteps to reconstruct from each embedding (if None, the encoder will output the largest length encountered during training). Returns: a list of reconstructed time series.

-
- -
-
-encode(column_data, dependency_data=None, get_next_count=None)[source]
-

Encode a list of time series data. Parameters: column_data – a list of (self._n_dims)-dimensional time series [[dim1_data], …] to encode; get_next_count – default None, but you can pass a number X and it will return the X following predictions on the series for each ts_data_point in column_data.

-
-
-
Returns
-

a list of encoded time series or if get_next_count !=0 two lists (encoded_values, projected_numbers)

-
-
-
- -
-
-prepare(train_priming_data, dev_priming_data, dependency_data={}, ts_analysis=None, feedback_hoop_function=<bound method Logger.info of <Logger lightwood-2594 (DEBUG)>>, batch_size=256)[source]
-
-
Parameters
-
    -
  • priming_data – a list of (self._n_dims)-dimensional time series [[dim1_data], …]

  • -
  • dependency_data – raw data from other columns

  • -
  • ts_analysis – dictionary with time analysis info (e.g. normalizers for each target group)

  • -
  • feedback_hoop_function – method to use if you want to get feedback on the training process

  • -
-
-
-

  • batch_size – batch size used during the prepare() procedure.

-
- -
-
-setup_nn(ts_analysis, dependencies=None)[source]
-

This method must be executed after initialization, otherwise types remain unassigned.

-
- -
- -
-
-class encoder.TsArrayNumericEncoder(timesteps, is_target=False, positive_domain=False, grouped_by=None)[source]
-

Variant of the vanilla numerical encoder; supports dynamic mean re-scaling.

-
-
-encode(data, dependency_data={})[source]
-
-
Parameters
-

dependency_data – dict with grouped_by column info, to retrieve the correct normalizer for each datum

-
-
Returns
-

tensor with shape (batch, NxK) where N: self.data_window and K: sub-encoder # of output features

-
-
-
- -
- -
-
-class encoder.TsNumericEncoder(is_target=False, positive_domain=False, grouped_by=None)[source]
-

Variant of the vanilla numerical encoder; supports dynamic mean re-scaling.

-
-
-encode(data, dependency_data={})[source]
-
-
Parameters
-

dependency_data – dict with grouped_by column info, to retrieve the correct normalizer for each datum

-
-
-
- -
- -
-
-class encoder.VocabularyEncoder(is_target=False)[source]
-
- -
- - -
- -
- -
-
- -
- -
- - - - - - - - - - - \ No newline at end of file diff --git a/docs/ensemble.html b/docs/ensemble.html deleted file mode 100644 index c5b7d2fe2..000000000 --- a/docs/ensemble.html +++ /dev/null @@ -1,297 +0,0 @@ - - - - - - - - - - Ensemble — lightwood 1.6.1 documentation - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - -
- - - -
- - - - - -
- -
- - - - - - - - - - - - - - - - - - - -
- - - - -
-
-
-
- - - -
-

Ensemble

-

These modules ensemble mixers together in order to generate predictions.

-
-
-class ensemble.BaseEnsemble(target, mixers, data)[source]
-

Base class for all ensembles.

-

Ensembles wrap sets of Lightwood mixers, with the objective of generating better predictions based on the output of each mixer.

-
-
There are two important methods for any ensemble to work:
    -
  1. __init__() should prepare all mixers and internal ensemble logic.

  2. __call__() applies any aggregation rules to generate final predictions based on the output of each mixer.
-
-
-

Class Attributes:
  • mixers: List of mixers the ensemble will use.
  • supports_proba: For classification tasks, whether the ensemble supports yielding per-class scores rather than only returning the predicted label.

-
- -
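A tiny, hypothetical ensemble illustrating the two methods described above (it simply defers to its first mixer; the BaseEnsemble constructor signature is taken from this page, while the __call__ signature and import path are assumptions):

import pandas as pd
from lightwood.ensemble import BaseEnsemble
from lightwood.api.types import PredictionArguments

class FirstMixerEnsemble(BaseEnsemble):
    """Toy ensemble: always returns the predictions of the first mixer."""
    def __init__(self, target, mixers, data):
        super().__init__(target, mixers, data)

    def __call__(self, ds, args: PredictionArguments) -> pd.DataFrame:
        return self.mixers[0](ds, args)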
-
-class ensemble.BestOf(target, mixers, data, accuracy_functions, args, ts_analysis=None)[source]
-

This ensemble acts as a mixer selector. After evaluating accuracy for all internal mixers with the validation data, it sets the best mixer as the underlying model.

-
- -
-
-class ensemble.MeanEnsemble(target, mixers, data, dtype_dict)[source]
-
- -
-
-class ensemble.ModeEnsemble(target, mixers, data, dtype_dict, accuracy_functions, args, ts_analysis=None)[source]
-
- -
-
-class ensemble.WeightedMeanEnsemble(target, mixers, data, args, dtype_dict, accuracy_functions, ts_analysis=None)[source]
-
- -
- - -
- -
- -
-
- -
- -
- - - - - - - - - - - \ No newline at end of file diff --git a/docs/genindex.html b/docs/genindex.html deleted file mode 100644 index 1fad17784..000000000 --- a/docs/genindex.html +++ /dev/null @@ -1,816 +0,0 @@ - - - - - - - - - - Index — lightwood 1.6.1 documentation - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - -
- - - -
- - - - - -
- -
- - - - - - - - - - - - - - - - - - - -
- -
- - -
-
-
-
- - -

Index

- -
- -
-
- -
- -
-

-
-
- -
- -
- - - - - - - - - - - \ No newline at end of file diff --git a/docs/helpers.html b/docs/helpers.html deleted file mode 100644 index a335e1397..000000000 --- a/docs/helpers.html +++ /dev/null @@ -1,249 +0,0 @@ - - - - - - - - - - Helpers — lightwood 1.6.1 documentation - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - -
- - - -
- - - - - -
- -
- - - - - - - - - - - - - - - - - - - -
- - - - -
-
-
-
- - - -
-

Helpers

-

Various helper functions

-
- - -
- -
- -
-
- -
- -
- - - - - - - - - - - \ No newline at end of file diff --git a/docs/index.html b/docs/index.html deleted file mode 100644 index 4f957f081..000000000 --- a/docs/index.html +++ /dev/null @@ -1,479 +0,0 @@ - - - - - - - - - - Welcome to Lightwood’s Documentation! — lightwood 1.6.1 documentation - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - -
- - - -
- - - - - -
- -
- - - - - - - - - - - - - - - - - - - -
- -
- - -
-
-
-
- - - -
-

Welcome to Lightwood’s Documentation!

-
-
Release
-

1.6.1

-
-
Date
-

Nov 03, 2021

-
-
-
-

-
-

Lightwood is an AutoML framework that enables you to generate and customize machine learning pipelines using a declarative syntax called JSON-AI.

-

Our goal is to make the data science/machine learning (DS/ML) life cycle easier by allowing users to focus on what they want to do with their data, without needing to write repetitive boilerplate code around machine learning and data preparation. Instead, we enable you to focus on the parts of a model that are truly unique and custom.

-

Lightwood works with a variety of data types such as numbers, dates, categories, tags, text, arrays and various multimedia formats. These data types can be combined together to solve complex problems. We also support a time-series mode for problems that have between-row dependencies.

-

Our JSON-AI syntax allows users to change any and all parts of the models Lightwood automatically generates. The syntax outlines the specific details of each step in the modeling pipeline. Users may override default values (for example, changing the type of a column) or, alternatively, entirely replace steps with their own methods (e.g. use a random forest model for a predictor). Lightwood creates a “JSON-AI” object from this syntax which can then be used to automatically generate python code to represent your pipeline.

-

For details as to how Lightwood works, check out the Lightwood Philosophy .

- -
-

Installation

-

You can install Lightwood as follows:

-
pip3 install lightwood
-
-
-
-

Note

-

depending on your environment, you might have to use pip instead of pip3 in the above command.

-
-

However, we recommend creating a python virtual environment.

-
-

Setting up a dev environment

-
    -
  • Clone lightwood

  • -
  • Run cd lightwood && pip install -r requirements.txt

  • -
  • Add it to your python path (e.g. by adding export PYTHONPATH='/where/you/cloned/lightwood':$PYTHONPATH as a newline at the end of your ~/.bashrc file)

  • -
  • Check that the unit-tests are passing by going into the directory where you cloned lightwood and running: python -m unittest discover tests

  • -
-
-

Warning

-

If python defaults to python2.x in your environment, use python3 and pip3 instead.

-
-

Currently, the preferred environment for working with lightwood is visual studio code, a very popular python IDE. However, any IDE should work. While we don’t have guides for those, please feel free to use the following section as a template for VSCode, or to contribute your own tips and tricks to set up other IDEs.

-
-
-

Setting up a VSCode environment

-
    -
  • Install and enable setting sync using github account (if you use multiple machines)

  • -
  • Install pylance (for types) and make sure to disable pyright

  • -
  • Go to Python > Lint: Enabled and disable everything but flake8

  • -
  • Set python.linting.flake8Path to the full path to flake8 (which flake8)

  • -
  • Set Python Formatting: Provider to autopep8

  • -
  • Add --global-config=<path_to>/lightwood/.flake8 and --experimental to Python Formatting: Autopep8 Args

  • -
  • Install live share and live share whiteboard

  • -
-
-
-
-

Example Use Cases

-

Lightwood works with pandas.DataFrames. Once a DataFrame is loaded, define a “ProblemDefinition” via a dictionary. The only thing a user needs to specify is the name of the column to predict (via the key target).

-

Create a JSON-AI syntax from the command json_ai_from_problem. Lightwood can then use this object to automatically generate python code filling in the steps of the ML pipeline via code_from_json_ai.

-

You can make a Predictor object, instantiated with that code via predictor_from_code.

-

To train a Predictor end-to-end, starting with unprocessed data, users can use the predictor.learn() command with the data.

-
import pandas as pd
-from lightwood.api.high_level import (
-    ProblemDefinition,
-    json_ai_from_problem,
-    code_from_json_ai,
-    predictor_from_code,
-)
-
-# Load a pandas dataset
-df = pd.read_csv(
-    "https://raw.githubusercontent.com/mindsdb/benchmarks/main/benchmarks/datasets/hdi/data.csv"
-)
-
-# Define the prediction task by naming the target column
-pdef = ProblemDefinition.from_dict(
-    {
-        "target": "Development Index",  # column you want to predict
-    }
-)
-
-# Generate JSON-AI code to model the problem
-json_ai = json_ai_from_problem(df, problem_definition=pdef)
-
-# OPTIONAL - see the JSON-AI syntax
-#print(json_ai.to_json())
-
-# Generate python code
-code = code_from_json_ai(json_ai)
-
-# OPTIONAL - see generated code
-#print(code)
-
-# Create a predictor from python code
-predictor = predictor_from_code(code)
-
-# Train a model end-to-end from raw data to a finalized predictor
-predictor.learn(df)
-
-# Make the train/test splits and show predictions for a few examples
-test_df = predictor.split(predictor.preprocess(df))["test"]
-preds = predictor.predict(test_df).iloc[:10]
-print(preds)
-
-
-
-

BYOM: Bring your own models

-

Lightwood supports user architectures/approaches so long as you follow the abstractions provided within each step.

-

Our tutorials provide specific use cases for how to introduce customization into your pipeline. Check out “custom cleaner”, “custom splitter”, “custom explainer”, and “custom mixer”. Stay tuned for further updates.

-
-
-
-

Contribute to Lightwood

-

We love to receive contributions from the community and hear your opinions! We want to make contributing to Lightwood as easy as it can be.

-

Being part of the core Lightwood team is possible for anyone who is motivated and wants to be part of that journey!

-

Please continue reading this guide if you are interested in helping democratize machine learning.

-
-

How can you help us?

-
    -
  • Report a bug

  • -
  • Improve documentation

  • -
  • Solve an issue

  • -
  • Propose new features

  • -
  • Discuss feature implementations

  • -
  • Submit a bug fix

  • -
  • Test Lightwood with your own data and let us know how it went!

  • -
-
-
-

Code contributions

-

In general, we follow the fork-and-pull git workflow. Here are the steps:

-
    -
  1. Fork the Lightwood repository

  2. Checkout the staging branch, which is the development version that gets released weekly (there can be exceptions, but make sure to ask and confirm with us).

  3. Make changes and commit them

  4. Make sure that the CI tests pass. You can run the test suite locally with flake8 . to check style and python -m unittest discover tests to run the automated tests. This doesn’t guarantee it will pass remotely since we run on multiple envs, but should work in most cases.

  5. Push your local branch to your fork

  6. Submit a pull request from your repo to the staging branch of mindsdb/lightwood so that we can review your changes. Be sure to merge the latest from staging before making a pull request!
-
-

Note

-

You will need to sign a CLA (Contributor License Agreement) for the code, since lightwood is under a GPL license.

-
-
-
-

Feature and Bug reports

-

We use GitHub issues to track bugs and features. Report them by opening a new issue and filling out all of the required inputs.

-
-
-

Code review process

-

Pull request (PR) reviews are done on a regular basis. If your PR does not address a previous issue, please make an issue first.

-

If your change has a chance of affecting performance, we will run our private benchmark suite to validate it.

-

Please, make sure you respond to our feedback/questions.

-
-
-

Community

-

If you have additional questions or you want to chat with MindsDB core team, you can join our community:

- -MindsDB Community -

To get updates on Lightwood and MindsDB’s latest announcements, releases, and events, sign up for our Monthly Community Newsletter.

-

Join our mission of democratizing machine learning and allowing developers to become data scientists!

-
-
-
-

Hacktoberfest 2021

-

We are very excited that Lightwood is participating in this year’s Hacktoberfest 2021 event. This month-long event through October gives you the chance to contribute to the Open Source codebase of Lightwood and MindsDB!

-

The Lightwood core team has prepared several issues of different types that are ideal for first-time contributors and will be posted throughout the month. It’s entirely up to you what you choose to work on and if you have your own great idea, feel free to suggest it by reaching out to us via our Slack community or by posting an issue with the discussion tag.

-

Our Major Incentive and SWAG!

-

Make contributions and enter into the draw for a Deep Learning Laptop powered by the NVIDIA RTX 3080 Max-Q GPU. Pre-installed with TensorFlow, PyTorch, CUDA, cuDNN and more.

-Tensorbook by Lambda Labs -

Also, we’d love to send you a special MindsDB SWAG gift pack:

-MindsDB Swag -

Please make sure to read the contributions-guidelines first!

-
-

How to participate

-
    -
  1. Contribute by making pull requests to any of our open issues labeled with the hacktoberfest tag during October. All hacktoberfest issues will specify how many points a successfully merged PR is worth.

  2. Have a total score of at least 5 points in order to enter the big prize draw.

  3. Complete the form with links to all your completed PR’s so we know where to ship the gift pack to!
-

Entries close at midnight (PST) Sunday, 31 October 2021 with the prize draw winner announced at an online event on Monday, 1st of November.

-

Please check MindsDB’s hacktoberfest website for more details.

-
-

Note

-

if you wish to contribute with something that is not currently flagged as a hacktoberfest issue, make an issue (or make a comment if an issue already exists), and let one of the core Lightwood team researchers approve it.

-
-
-
-

Contributor Code of Conduct

-

Please note that this project is released with a Contributor Code of Conduct. By participating in this project, you agree to abide by its terms.

-
-
-
-

Current contributors

- - - - -
-
-

License

- -PyPI - License - -
- -
- - -
- -
- -
-
- -
- -
- - - - - - - - - - - \ No newline at end of file diff --git a/docs/lightwood_philosophy.html b/docs/lightwood_philosophy.html deleted file mode 100644 index a99733aaf..000000000 --- a/docs/lightwood_philosophy.html +++ /dev/null @@ -1,279 +0,0 @@ - - - - - - - - - - Lightwood Philosophy — lightwood 1.6.1 documentation - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - -
- - - -
- - - - - -
- -
- - - - - - - - - - - - - - - - - - - -
- - - - -
-
-
-
- - - -
-

Lightwood Philosophy

-

Lightwood abstracts the ML pipeline into 3 core steps:

-
    -
  1. Pre-processing and data cleaning

  2. Feature engineering

  3. Model building and training
-Lightwood "under-the-hood" -
-

i) Pre-processing and cleaning

-

For each column in your dataset, Lightwood will identify the suspected data type (numeric, categorical, etc.) via a brief statistical analysis. From this, it will generate a JSON-AI syntax.

-

If the user keeps default behavior, Lightwood will perform a brief pre-processing approach to clean each column according to its identified data type. From there, it will split the data into train/dev/test splits.

-

The cleaner and splitter objects respectively refer to the pre-processing and the data splitting functions.

-
-
-

ii) Feature Engineering

-

Data can be converted into features via “encoders”. Encoders represent the rules for transforming pre-processed data into numerical representations that a model can use.

-

Encoders can be rule-based or learned. A rule-based encoder transforms data per a specific set of instructions (ex: normalized numerical data) whereas a learned encoder produces a representation of the data after training (ex: a “[CLS]” token in a language model).

-

Encoders are assigned to each column of data based on the data type; users can override this assignment either at the column-based level or at the data-type based level. Encoders inherit from the BaseEncoder class.

-
-
-

iii) Model Building and Training

-

We call a predictive model that intakes encoded feature data and outputs a prediction for the target of interest a mixer model. Users can either use Lightwood’s default mixers or create their own approaches inherited from the BaseMixer class.

-

We predominantly use PyTorch based approaches, but can support other models.

-
-
- - -
- -
- -
-
- -
- -
- - - - - - - - - - - \ No newline at end of file diff --git a/docs/mixer.html b/docs/mixer.html deleted file mode 100644 index 8f23d4b17..000000000 --- a/docs/mixer.html +++ /dev/null @@ -1,631 +0,0 @@ - - - - - - - - - - Mixers — lightwood 1.6.1 documentation - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - -
- - - -
- - - - - -
- -
- - - - - - - - - - - - - - - - - - - -
- - - - -
-
-
-
- - - -
-

Mixers

-

Machine learning models which learn to predict the target value using the encoded representations.

-
-
-class mixer.BaseMixer(stop_after)[source]
-

Base class for all mixers.

-

Mixers are the backbone of all Lightwood machine learning models. They intake encoded feature representations for every column, and are tasked with learning to fulfill the predictive requirements stated in a problem definition.

-
-
There are two important methods for any mixer to work:
    -
  1. fit() contains all logic to train the mixer with the training data that has been encoded by all the (already trained) Lightwood encoders for any given task.

  2. __call__() is executed to generate predictions once the mixer has been trained using fit().
-
-
-

An additional partial_fit() method is used to update any mixer that has already been trained.

-

Class Attributes:
  • stable: If set to True, this mixer should always work. Any mixer with stable=False can be expected to fail under some circumstances.
  • fit_data_len: Length of the training data.
  • supports_proba: For classification tasks, whether the mixer supports yielding per-class scores rather than only returning the predicted label.

-

Initializes a mixer.

-
-
Parameters
-

stop_after (int) – Time budget to train this mixer.

-
-
-
-
-fit(train_data, dev_data)[source]
-

Fits/trains a mixer with training data.

-
-
Parameters
-
    -
  • train_data (EncodedDs) – encoded representations of the training data subset.

  • -
  • dev_data (EncodedDs) – encoded representations of the “dev” data subset. This can be used as an internal validation subset (e.g. it is used for early stopping in the default Neural mixer).

  • -
-
-
Return type
-

None

-
-
-
- -
-
-partial_fit(train_data, dev_data)[source]
-

Partially fits/trains a mixer with new training data. This is a somewhat experimental method, and it aims at updating pre-existing Lightwood predictors.

-
-
Parameters
-
    -
  • train_data (EncodedDs) – encoded representations of the new training data subset.

  • -
  • dev_data (EncodedDs) – encoded representations of new the “dev” data subset. As in fit(), this can be used as an internal validation subset.

  • -
-
-
Return type
-

None

-
-
-
- -
- -
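A minimal, hypothetical mixer that follows the fit()/__call__() contract above (the prediction-dataframe format, the __call__ signature and the import paths are assumptions; a real mixer would also respect the stop_after time budget):

import pandas as pd
from lightwood.mixer import BaseMixer
from lightwood.data import EncodedDs
from lightwood.api.types import PredictionArguments

class MeanGuess(BaseMixer):
    """Toy mixer for numerical targets: always predicts the training mean."""
    stable = True

    def __init__(self, stop_after: int, target: str):
        super().__init__(stop_after)
        self.target = target
        self.mean = None

    def fit(self, train_data: EncodedDs, dev_data: EncodedDs) -> None:
        self.mean = train_data.data_frame[self.target].mean()

    def partial_fit(self, train_data: EncodedDs, dev_data: EncodedDs) -> None:
        self.fit(train_data, dev_data)

    def __call__(self, ds: EncodedDs, args: PredictionArguments) -> pd.DataFrame:
        return pd.DataFrame({"prediction": [self.mean] * len(ds)})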
-
-class mixer.LightGBM(stop_after, target, dtype_dict, input_cols, fit_on_dev, use_optuna=True)[source]
-

Initializes a mixer.

-
-
Parameters
-

stop_after (int) – Time budget to train this mixer.

-
-
-
-
-fit(train_data, dev_data)[source]
-

Fits/trains a mixer with training data.

-
-
Parameters
-
    -
  • train_data (EncodedDs) – encoded representations of the training data subset.

  • -
  • dev_data (EncodedDs) – encoded representations of the “dev” data subset. This can be used as an internal validation subset (e.g. it is used for early stopping in the default Neural mixer).

  • -
-
-
Return type
-

None

-
-
-
- -
-
-partial_fit(train_data, dev_data)[source]
-

Partially fits/trains a mixer with new training data. This is a somewhat experimental method, and it aims at updating pre-existing Lightwood predictors.

-
-
Parameters
-
    -
  • train_data (EncodedDs) – encoded representations of the new training data subset.

  • -
  • dev_data (EncodedDs) – encoded representations of new the “dev” data subset. As in fit(), this can be used as an internal validation subset.

  • -
-
-
Return type
-

None

-
-
-
- -
- -
-
-class mixer.LightGBMArray(stop_after, target, dtype_dict, input_cols, n_ts_predictions, fit_on_dev)[source]
-

LightGBM-based model, intended for usage in time series tasks.

-

Initializes a mixer.

-
-
Parameters
-

stop_after (int) – Time budget to train this mixer.

-
-
-
-
-fit(train_data, dev_data)[source]
-

Fits/trains a mixer with training data.

-
-
Parameters
-
    -
  • train_data (EncodedDs) – encoded representations of the training data subset.

  • -
  • dev_data (EncodedDs) – encoded representations of the “dev” data subset. This can be used as an internal validation subset (e.g. it is used for early stopping in the default Neural mixer).

  • -
-
-
Return type
-

None

-
-
-
- -
-
-partial_fit(train_data, dev_data)[source]
-

Partially fits/trains a mixer with new training data. This is a somewhat experimental method, and it aims at updating pre-existing Lightwood predictors.

-
-
Parameters
-
    -
  • train_data (EncodedDs) – encoded representations of the new training data subset.

  • -
  • dev_data (EncodedDs) – encoded representations of new the “dev” data subset. As in fit(), this can be used as an internal validation subset.

  • -
-
-
Return type
-

None

-
-
-
- -
- -
-
-class mixer.Neural(stop_after, target, dtype_dict, timeseries_settings, target_encoder, net, fit_on_dev, search_hyperparameters)[source]
-

The Neural mixer trains a fully connected dense network from the concatenated encoded outputs of each of the features in the dataset, in order to predict the encoded target.

-
-
Parameters
-
    -
  • stop_after (int) – How long the total fitting process should take

  • -
  • target (str) – Name of the target column

  • -
  • dtype_dict (Dict[str, str]) – Data type dictionary

  • -
  • timeseries_settings (TimeseriesSettings) – TimeseriesSettings object for time-series tasks, refer to its documentation for available settings.

  • -
  • target_encoder (BaseEncoder) – Reference to the encoder used for the target

  • -
  • net (str) – The network type to use (DeafultNet or ArNet)

  • -
  • fit_on_dev (bool) – If we should fit on the dev dataset

  • -
  • search_hyperparameters (bool) – If the network should run a more thorough hyperparameter search (currently disabled)

  • -
-
-
-
-
-fit(train_data, dev_data)[source]
-

Fits the Neural mixer on some data, making it ready to predict.

-
-
Parameters
-
    -
  • train_data (EncodedDs) – The EncodedDs on which to train the network

  • -
  • dev_data (EncodedDs) – Data used for early stopping and hyperparameter determination

  • -
-
-
Return type
-

None

-
-
-
- -
-
-partial_fit(train_data, dev_data)[source]
-

Augments the mixer’s fit with new data; the number of epochs is based on the number of epochs the original fitting took.

-
-
Parameters
-
    -
  • train_data (EncodedDs) – The EncodedDs on which to train the network

  • -
  • dev_data (EncodedDs) – Data used for early stopping and hyperparameter determination

  • -
-
-
Return type
-

None

-
-
-
- -
- -
-
-class mixer.Regression(stop_after, target_encoder, dtype_dict, target)[source]
-

Initializes a mixer.

-
-
Parameters
-

stop_after (int) – Time budget to train this mixer.

-
-
-
-
-fit(train_data, dev_data)[source]
-

Fits/trains a mixer with training data.

-
-
Parameters
-
    -
  • train_data (EncodedDs) – encoded representations of the training data subset.

  • -
  • dev_data (EncodedDs) – encoded representations of the “dev” data subset. This can be used as an internal validation subset (e.g. it is used for early stopping in the default Neural mixer).

  • -
-
-
Return type
-

None

-
-
-
- -
-
-partial_fit(train_data, dev_data)[source]
-

Partially fits/trains a mixer with new training data. This is a somewhat experimental method, and it aims at updating pre-existing Lightwood predictors.

-
-
Parameters
-
    -
  • train_data (EncodedDs) – encoded representations of the new training data subset.

  • -
  • dev_data (EncodedDs) – encoded representations of new the “dev” data subset. As in fit(), this can be used as an internal validation subset.

  • -
-
-
Return type
-

None

-
-
-
- -
- -
-
-class mixer.SkTime(stop_after, target, dtype_dict, n_ts_predictions, ts_analysis)[source]
-

Initializes a mixer.

-
-
Parameters
-

stop_after (int) – Time budget to train this mixer.

-
-
-
-
-fit(train_data, dev_data)[source]
-

Fits/trains a mixer with training data.

-
-
Parameters
-
    -
  • train_data (EncodedDs) – encoded representations of the training data subset.

  • -
  • dev_data (EncodedDs) – encoded representations of the “dev” data subset. This can be used as an internal validation subset (e.g. it is used for early stopping in the default Neural mixer).

  • -
-
-
Return type
-

None

-
-
-
- -
-
-partial_fit(train_data, dev_data)[source]
-

Note: sktime asks for “specification of the time points for which forecasts are requested”, and this mixer complies by assuming forecasts will start immediately after the last observed value.

-

Because of this, partial_fit ensures that both dev and test splits are used to fit the AutoARIMA model.

-

Due to how lightwood implements the update procedure, expected inputs are (for a train-dev-test split):

-
-
Parameters
-
    -
  • dev_data (EncodedDs) – original test split (used to validate and select model if ensemble is BestOf)

  • -
  • train_data (EncodedDs) – includes original train and dev split

  • -
-
-
Return type
-

None

-
-
-
- -
- -
-
-class mixer.Unit(stop_after, target_encoder)[source]
-

Initializes a mixer.

-
-
Parameters
-

stop_after (int) – Time budget to train this mixer.

-
-
-
-
-fit(train_data, dev_data)[source]
-

Fits/trains a mixer with training data.

-
-
Parameters
-
    -
  • train_data (EncodedDs) – encoded representations of the training data subset.

  • -
  • dev_data (EncodedDs) – encoded representations of the “dev” data subset. This can be used as an internal validation subset (e.g. it is used for early stopping in the default Neural mixer).

  • -
-
-
Return type
-

None

-
-
-
- -
-
-partial_fit(train_data, dev_data)[source]
-

Partially fits/trains a mixer with new training data. This is a somewhat experimental method, and it aims at updating pre-existing Lightwood predictors.

-
-
Parameters
-
    -
  • train_data (EncodedDs) – encoded representations of the new training data subset.

  • -
  • dev_data (EncodedDs) – encoded representations of new the “dev” data subset. As in fit(), this can be used as an internal validation subset.

  • -
-
-
Return type
-

None

-
-
-
- -
- -
- - -
- -
- -
-
- -
- -
- - - - - - - - - - - \ No newline at end of file diff --git a/docs/objects.inv b/docs/objects.inv deleted file mode 100644 index 229710788..000000000 Binary files a/docs/objects.inv and /dev/null differ diff --git a/docs/py-modindex.html b/docs/py-modindex.html deleted file mode 100644 index 0477409bb..000000000 --- a/docs/py-modindex.html +++ /dev/null @@ -1,310 +0,0 @@ - - - - - - - - - - Python Module Index — lightwood 1.6.1 documentation - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - -
- - - -
- - - - - -
- -
- - - - - - - - - - - - - - - - - - - -
- -
- - -
-
-
-
- - -

Python Module Index

- -
- a | - d | - e | - h | - m -
- - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - -
 
- a
- analysis -
- api -
    - api.high_level -
    - api.json_ai -
    - api.predictor -
    - api.types -
 
- d
- data -
    - data.cleaner -
 
- e
- encoder -
- ensemble -
 
- h
- helpers -
 
- m
- mixer -
- - -
- -
-
- -
- -
-

-
-
- -
- -
- - - - - - - - - - - \ No newline at end of file diff --git a/docs/search.html b/docs/search.html deleted file mode 100644 index b66fb5149..000000000 --- a/docs/search.html +++ /dev/null @@ -1,234 +0,0 @@ - - - - - - - - - - Search — lightwood 1.6.1 documentation - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - -
- - - -
- - - - - -
- -
- - - - - - - - - - - - - - - - - - - -
- -
- - -
-
-
-
- - - - -
- -
- -
- -
-
- -
- -
-

-
-
- -
- -
- - - - - - - - - - - - - - - - \ No newline at end of file diff --git a/docs/searchindex.js b/docs/searchindex.js deleted file mode 100644 index b1ddbcd27..000000000 --- a/docs/searchindex.js +++ /dev/null @@ -1 +0,0 @@ -Search.setIndex({docnames:["analysis","api","api/dtype","api/encode","api/high_level","api/json_ai","api/predictor","api/types","data","data/cleaner","encoder","ensemble","helpers","index","lightwood_philosophy","mixer","tutorials","tutorials/custom_cleaner/custom_cleaner","tutorials/custom_encoder_rulebased/custom_encoder_rulebased","tutorials/custom_explainer/custom_explainer","tutorials/custom_mixer/custom_mixer","tutorials/custom_splitter/custom_splitter","tutorials/tutorial_data_analysis/tutorial_data_analysis","tutorials/tutorial_time_series/Tutorial - Training a time series predictor","tutorials/tutorial_update_models/Tutorial -- Update a predictor"],envversion:{"sphinx.domains.c":2,"sphinx.domains.changeset":1,"sphinx.domains.citation":1,"sphinx.domains.cpp":4,"sphinx.domains.index":1,"sphinx.domains.javascript":2,"sphinx.domains.math":2,"sphinx.domains.python":3,"sphinx.domains.rst":2,"sphinx.domains.std":2,"sphinx.ext.viewcode":1,nbsphinx:3,sphinx:56},filenames:["analysis.rst","api.rst","api/dtype.rst","api/encode.rst","api/high_level.rst","api/json_ai.rst","api/predictor.rst","api/types.rst","data.rst","data/cleaner.rst","encoder.rst","ensemble.rst","helpers.rst","index.rst","lightwood_philosophy.rst","mixer.rst","tutorials.rst","tutorials/custom_cleaner/custom_cleaner.ipynb","tutorials/custom_encoder_rulebased/custom_encoder_rulebased.ipynb","tutorials/custom_explainer/custom_explainer.ipynb","tutorials/custom_mixer/custom_mixer.ipynb","tutorials/custom_splitter/custom_splitter.ipynb","tutorials/tutorial_data_analysis/tutorial_data_analysis.ipynb","tutorials/tutorial_time_series/Tutorial - Training a time series predictor.ipynb","tutorials/tutorial_update_models/Tutorial -- Update a 
predictor.ipynb"],objects:{"":{analysis:[0,0,0,"-"],data:[8,0,0,"-"],encoder:[10,0,0,"-"],ensemble:[11,0,0,"-"],helpers:[12,0,0,"-"],mixer:[15,0,0,"-"]},"analysis.AccStats":{analyze:[0,2,1,""]},"analysis.BaseAnalysisBlock":{analyze:[0,2,1,""],explain:[0,2,1,""]},"analysis.GlobalFeatureImportance":{analyze:[0,2,1,""]},"analysis.ICP":{analyze:[0,2,1,""],explain:[0,2,1,""]},"api.dtype":{dtype:[2,1,1,""]},"api.high_level":{analyze_dataset:[4,3,1,""],code_from_json_ai:[4,3,1,""],code_from_problem:[4,3,1,""],json_ai_from_problem:[4,3,1,""],predictor_from_code:[4,3,1,""],predictor_from_json_ai:[4,3,1,""],predictor_from_problem:[4,3,1,""],predictor_from_state:[4,3,1,""]},"api.json_ai":{code_from_json_ai:[5,3,1,""],generate_json_ai:[5,3,1,""],lookup_encoder:[5,3,1,""],validate_json_ai:[5,3,1,""]},"api.predictor":{PredictorInterface:[6,1,1,""]},"api.predictor.PredictorInterface":{adjust:[6,2,1,""],analyze_data:[6,2,1,""],analyze_ensemble:[6,2,1,""],featurize:[6,2,1,""],fit:[6,2,1,""],learn:[6,2,1,""],predict:[6,2,1,""],prepare:[6,2,1,""],preprocess:[6,2,1,""],save:[6,2,1,""],split:[6,2,1,""]},"api.types":{DataAnalysis:[7,1,1,""],Feature:[7,1,1,""],JsonAI:[7,1,1,""],ModelAnalysis:[7,1,1,""],Module:[7,1,1,""],Output:[7,1,1,""],PredictionArguments:[7,1,1,""],ProblemDefinition:[7,1,1,""],StatisticalAnalysis:[7,1,1,""],TimeseriesSettings:[7,1,1,""],TypeInformation:[7,1,1,""]},"api.types.Feature":{from_dict:[7,2,1,""],from_json:[7,2,1,""],to_dict:[7,2,1,""],to_json:[7,2,1,""]},"api.types.JsonAI":{from_dict:[7,2,1,""],from_json:[7,2,1,""],to_dict:[7,2,1,""],to_json:[7,2,1,""]},"api.types.PredictionArguments":{from_dict:[7,2,1,""],to_dict:[7,2,1,""]},"api.types.ProblemDefinition":{from_dict:[7,2,1,""],from_json:[7,2,1,""],to_dict:[7,2,1,""],to_json:[7,2,1,""]},"api.types.TimeseriesSettings":{from_dict:[7,2,1,""],from_json:[7,2,1,""],to_dict:[7,2,1,""],to_json:[7,2,1,""]},"data.ConcatedEncodedDs":{clear_cache:[8,2,1,""],data_frame:[8,4,1,""],get_column_original_data:[8,2,1,""],get_encoded_column_data:[8,2,1,""]},"data.EncodedDs":{clear_cache:[8,2,1,""],get_column_original_data:[8,2,1,""],get_encoded_column_data:[8,2,1,""],get_encoded_data:[8,2,1,""]},"data.cleaner":{cleaner:[9,3,1,""],get_cleaning_func:[9,3,1,""]},"encoder.DatetimeEncoder":{encode:[10,2,1,""],encode_one:[10,2,1,""]},"encoder.DatetimeNormalizerEncoder":{encode:[10,2,1,""],encode_one:[10,2,1,""]},"encoder.Img2VecEncoder":{encode:[10,2,1,""]},"encoder.PretrainedLangEncoder":{encode:[10,2,1,""],is_trainable_encoder:[10,5,1,""],prepare:[10,2,1,""]},"encoder.TimeSeriesEncoder":{decode:[10,2,1,""],encode:[10,2,1,""],prepare:[10,2,1,""],setup_nn:[10,2,1,""]},"encoder.TsArrayNumericEncoder":{encode:[10,2,1,""]},"encoder.TsNumericEncoder":{encode:[10,2,1,""]},"mixer.BaseMixer":{fit:[15,2,1,""],partial_fit:[15,2,1,""]},"mixer.LightGBM":{fit:[15,2,1,""],partial_fit:[15,2,1,""]},"mixer.LightGBMArray":{fit:[15,2,1,""],partial_fit:[15,2,1,""]},"mixer.Neural":{fit:[15,2,1,""],partial_fit:[15,2,1,""]},"mixer.Regression":{fit:[15,2,1,""],partial_fit:[15,2,1,""]},"mixer.SkTime":{fit:[15,2,1,""],partial_fit:[15,2,1,""]},"mixer.Unit":{fit:[15,2,1,""],partial_fit:[15,2,1,""]},analysis:{AccStats:[0,1,1,""],BaseAnalysisBlock:[0,1,1,""],GlobalFeatureImportance:[0,1,1,""],ICP:[0,1,1,""],explain:[0,3,1,""],model_analyzer:[0,3,1,""]},api:{high_level:[4,0,0,"-"],json_ai:[5,0,0,"-"],predictor:[6,0,0,"-"],types:[7,0,0,"-"]},data:{ConcatedEncodedDs:[8,1,1,""],EncodedDs:[8,1,1,""],cleaner:[9,0,0,"-"],splitter:[8,3,1,""],timeseries_analyzer:[8,3,1,""],transform_timeseries:[8,
3,1,""]},encoder:{ArrayEncoder:[10,1,1,""],BaseEncoder:[10,1,1,""],BinaryEncoder:[10,1,1,""],CategoricalAutoEncoder:[10,1,1,""],DatetimeEncoder:[10,1,1,""],DatetimeNormalizerEncoder:[10,1,1,""],Img2VecEncoder:[10,1,1,""],MultiHotEncoder:[10,1,1,""],NumericEncoder:[10,1,1,""],OneHotEncoder:[10,1,1,""],PretrainedLangEncoder:[10,1,1,""],ShortTextEncoder:[10,1,1,""],TextRnnEncoder:[10,5,1,""],TimeSeriesEncoder:[10,1,1,""],TsArrayNumericEncoder:[10,1,1,""],TsNumericEncoder:[10,1,1,""],VocabularyEncoder:[10,1,1,""]},ensemble:{BaseEnsemble:[11,1,1,""],BestOf:[11,1,1,""],MeanEnsemble:[11,1,1,""],ModeEnsemble:[11,1,1,""],WeightedMeanEnsemble:[11,1,1,""]},mixer:{BaseMixer:[15,1,1,""],LightGBM:[15,1,1,""],LightGBMArray:[15,1,1,""],Neural:[15,1,1,""],Regression:[15,1,1,""],SkTime:[15,1,1,""],Unit:[15,1,1,""]}},objnames:{"0":["py","module","Python module"],"1":["py","class","Python class"],"2":["py","method","Python method"],"3":["py","function","Python function"],"4":["py","property","Python property"],"5":["py","attribute","Python attribute"]},objtypes:{"0":"py:module","1":"py:class","2":"py:method","3":"py:function","4":"py:property","5":"py:attribute"},terms:{"0":[7,10,17,18,19,20,21,22,23,24],"000000":[17,24],"0001":[19,23,24],"00014":[19,23,24],"00019599999999999997":24,"00027439999999999995":24,"0003841599999999999":24,"0005378239999999999":24,"0007529535999999998":24,"0010541350399999995":24,"003915625183205856":23,"003940282500626748":23,"00396897013772998":23,"0040611259769975094":23,"00410254764975163":23,"004112129096399274":23,"004132882597153647":23,"004178977953760247":23,"004205447932084401":23,"004242659451668723":23,"0042474141246394105":23,"0042895584252842685":23,"004315985190240961":23,"004340721536100957":23,"004357850760744329":23,"0043628366892797905":23,"004394709227377908":23,"004398583738427413":23,"00440603481572971":23,"004414253694969311":23,"004449873953534846":23,"004484773205037703":23,"004498099365778136":23,"004558674494425456":23,"004563712864591364":23,"004568418233018173":23,"004570525518634863":23,"004572713087525284":23,"004595928704529478":23,"004655592012823674":23,"004687661141679998":23,"004697896095744351":23,"004704843226232026":23,"004732183615366618":23,"0047617426566910325":23,"004799575188703704":23,"004856080601089879":23,"00491229374157755":23,"004992981385766414":23,"005274":21,"005300704742732801":23,"005441865690967493":23,"0064399814919421546":23,"0077215013273975305":23,"008983":21,"009300":24,"009431":21,"01":[7,23],"010309":21,"013570":24,"014724":21,"018307":21,"02":23,"020042672178201507":23,"021053":21,"03":[13,23],"033":17,"04":23,"040324918955":18,"045414":24,"05":23,"05131785199046135":24,"05133713781833649":24,"05137106031179428":24,"05156172439455986":24,"05157444253563881":24,"05171119421720505":24,"05178629234433174":24,"05181184783577919":24,"05184203386306763":24,"05194811150431633":24,"05239948257803917":24,"05273965373635292":24,"05285469442605972":24,"0528554692864418":24,"053060129284858704":24,"05336076393723488":24,"054013":17,"0540977418422699":24,"05455196276307106":24,"054767243564128876":24,"05490746721625328":24,"05525219812989235":24,"055353":21,"05569766089320183":24,"056835610419511795":24,"05746406316757202":24,"057854749262332916":24,"057915804286797844":24,"058218929916620255":24,"058798886835575104":24,"058977":24,"05949181318283081":24,"059752":21,"060018":21,"06127836927771568":24,"061458":21,"062094":24,"0621829479932785":24,"062418":24,"062723":21,"06285689026117325":24,"06355087459087372":24,"06360626469055812"
:24,"06447519361972809":24,"06457449619968732":24,"06492673171063264":24,"06545061928530534":24,"06553898006677628":24,"066928":21,"06783530339598656":24,"0679960281898578":24,"0683063194155693":24,"06892643496394157":24,"06960804760456085":24,"06978078782558442":24,"0706694945693016":24,"07171888339022796":24,"07175709307193756":24,"0718848429620266":24,"07201590612530709":24,"072781":21,"07304742932319641":24,"07307156516859929":24,"074196":24,"07440945506095886":24,"0766073539853096":24,"07746117562055588":24,"07790301740169525":24,"07820077985525131":24,"07849359512329102":24,"07869081199169159":24,"078803":21,"07936403155326843":24,"07996372133493423":24,"08":23,"0804857686161995":24,"08120812475681305":24,"0822099968791008":24,"082361":21,"08349908888339996":24,"085102":21,"0899":23,"09":23,"0900391936302185":24,"09420691430568695":24,"095921":21,"098698":21,"09954904764890671":24,"0m":17,"0x15685d970":23,"0x7fa84c42f640":19,"0x7fa85c015970":19,"1":[7,8,10,13,23,24],"10":[8,10,13,18,19,20,23,24],"100":[2,10,19,22],"100000":23,"1005":[19,23,24],"101288":21,"102":22,"1030":24,"10328":22,"10642":20,"10668":18,"10669219493865967":24,"108300":21,"11":[19,20,22,23,24],"11000":18,"110474":21,"11420":22,"116":23,"1162123903632164":24,"12":[19,20,23,24],"1200":22,"121406078338623":24,"123":21,"12500":18,"12512":22,"125895":21,"126":22,"12749":17,"12800":22,"12839828431606293":24,"1284000":22,"128539":21,"13":[18,19,20,23,24],"1306731291":20,"131403695":22,"1325":23,"133558":21,"13552":18,"13604":22,"137458":21,"139097":21,"14":[19,20,23,24],"141267":21,"143":22,"14354":21,"144647":24,"145":18,"14696":22,"149":21,"15":[17,18,19,20,21,22,23,24],"150":18,"1500":22,"151":[19,23],"155604":24,"156":[19,23],"156551":24,"15735":18,"15788":22,"1582":17,"158233":21,"1592":22,"16":[19,23,24],"161383256316185":24,"1637":23,"1644320487976074":19,"165":23,"16500":18,"166480":21,"167170":21,"16800":18,"16880":22,"17":[19,22,23,24],"172":[19,23],"17300":18,"1749":23,"175575":21,"176":19,"17612512409687042":24,"176220":22,"17972":22,"18":[19,21,22,23,24],"18424":21,"185226":21,"189115":21,"19":[19,22,23,24],"190321":21,"19059491157531738":24,"19064":22,"191857":21,"193603":24,"19612012470445245":23,"1983":23,"1998":18,"1e6":21,"1m":17,"1st":13,"2":[23,24],"20":[18,19,23,24],"200":24,"201":24,"20156":22,"2016":18,"2017":18,"2019":18,"202":24,"2021":18,"203":24,"204":24,"20496906340122223":24,"205":24,"206":24,"206010":21,"21":[19,23,24],"21013741093675975":23,"212":22,"21248":22,"215153":21,"216030836105347":24,"21926474571228027":24,"219422":21,"22":[19,23,24],"221929":21,"222":[19,22],"22340":22,"225":[19,22],"225775":21,"23":[19,22,23,24],"233132":24,"23354031145572662":24,"23432":22,"2364":20,"237609":21,"239599":21,"24":[19,22,23,24],"24524":22,"2467":23,"247197":17,"247203":21,"247676":21,"2478":17,"247998":21,"24920770561408":23,"25":[19,22,23,24],"2538":23,"255425":21,"256":10,"25616":22,"2594":10,"25952":18,"26":[19,22,23,24],"26286360":22,"266151":21,"266369":24,"2667108178138733":24,"26708":22,"267220800":23,"26783383":22,"2684":22,"269899200":23,"27":[19,22,23,24],"270533":21,"272577600":23,"276790":24,"277838":21,"27800":22,"28":[19,22,23,24],"2815":23,"2816":23,"2817":23,"2818":23,"2819":23,"282":23,"2820":23,"2834":17,"284197":24,"284807":21,"286972":24,"2879962623119354":24,"28799878891615":23,"288135493289204":23,"28892":22,"29":[19,22,23,24],"2916666666666667":19,"294":22,"297044":24,"298":20,"29946":18,"29984":22,"3":[14,23,24],"30":[18,20,22,23,24],"3011":18,"301788":24,"303":20,"3080":13,"3
1":[13,21,23,24],"31076":22,"31100":22,"3151411712169647":24,"315372":17,"32":[22,23,24],"32168":22,"325":22,"3260806582190774":23,"3272357068278573":23,"327642":21,"32765168764374475":23,"3281749730760401":23,"3284675722772425":23,"329136921600862":23,"33":[23,24],"33007449995387683":23,"3303144524494807":23,"33072087665398914":23,"330986554423968":23,"3315189927816391":23,"33260":22,"33309372514486313":23,"33339183280865353":23,"336043":24,"338321":21,"339846":21,"34":[17,18,19,20,21,23,24],"3400":22,"340163":21,"340259":17,"340259125":17,"3431932":22,"34352":22,"3497759997844696":24,"35":[22,23,24],"35444":22,"356139":24,"358354":21,"359807":21,"36":[23,24],"3600":10,"360410":24,"36203":18,"363787":21,"365298":24,"36536":22,"37":[22,23,24],"37628":22,"377436":21,"3776":22,"378":21,"378155":21,"379780":21,"37c1b32fb":17,"38":[22,23,24],"387024":21,"38720":22,"39":[17,18,19,21,22,23,24],"392253":24,"3933545649051666":24,"39812":22,"4":[20,22,23,24],"40":[22,23,24],"403034":21,"407193":21,"40904":22,"409258":24,"41":[23,24],"41996":22,"42":[23,24],"420":[17,18,20,21,22],"421035":24,"42831":22,"43":[22,23,24],"43088":22,"43094":22,"43442972004413605":23,"43443459272384644":23,"4344787895679474":23,"4345344454050064":23,"43462760746479034":23,"43514105677604675":23,"4355204701423645":23,"43554021418094635":23,"43557313084602356":23,"4356466382741928":23,"43584632873535156":23,"4364318400621414":23,"437072":22,"43708017468452454":23,"4372607320547104":23,"4373621940612793":19,"43739429116249084":23,"4375789165496826":23,"4378361850976944":23,"43840254843235016":23,"43935835361480713":23,"4398685395717621":23,"44":[23,24],"4403578191995621":23,"44084450602531433":23,"4413738548755646":23,"44180":22,"44207488000392914":23,"44328153133392334":23,"44403648376464844":23,"4448719322681427":23,"4457828402519226":23,"4467353969812393":23,"4470987617969513":24,"4477883279323578":23,"448154":21,"4496418982744217":23,"45":[23,24],"450007":17,"45088090002536774":23,"45216208696365356":23,"45272":22,"4535674601793289":23,"4550795406103134":23,"45666399598121643":23,"458037514903":17,"45930930972099304":23,"46":[23,24],"4609563549359639":19,"46110378205776215":23,"462388":21,"4630257934331894":23,"46364":22,"464009":17,"4650762975215912":23,"4671967923641205":23,"46866":23,"46942955255508423":23,"47":[22,23,24],"47318898141384125":23,"47456":22,"4757150560617447":23,"476676":17,"4783552885055542":23,"48":[23,24],"480805":17,"4811210632324219":23,"4839773178100586":23,"48548":22,"4868":22,"48690974712371826":23,"49":[18,23,24],"491487":24,"49173182249069214":23,"4948585033416748":23,"49640":22,"49815742671489716":23,"5":[13,19,20,22,23,24],"50":[21,22,23,24],"500":22,"5015637576580048":23,"502292":21,"502444":24,"503198":21,"503390":24,"5050476491451263":23,"50573948541":21,"50732":22,"50752":17,"5086493343114853":23,"51":[23,24],"510845":17,"5145991444587708":23,"514654":21,"51500":21,"515988":24,"51824":22,"5184392035007477":23,"52":[23,24],"5223924517631531":23,"52565693":22,"526373103260994":23,"529026726881663":19,"52916":22,"53":[23,24],"5304456949234009":23,"53131":19,"532525897026062":19,"53258":18,"5345934927463531":23,"536347":21,"5372562408447266":19,"5374646186828613":19,"537568211555481":19,"5376394987106323":19,"5379230976104736":19,"53862726688385":19,"5390430688858032":19,"539453":24,"539688229560852":23,"54":[23,24],"54008":22,"5401763916015625":19,"540443":24,"5407888889312744":19,"5413248538970947":23,"5420359373092651":19,"5436923503875732":19,"5450661":22,"5454663038253784":19,"545589193
7017441":23,"5471524000167847":19,"548718":21,"5499386340379715":23,"55":[18,23,24],"5526844263076782":19,"554344117641449":23,"55539071559906":19,"5580050945281982":19,"5587586611509323":23,"56":[21,22,23,24],"56096":20,"5617222785949707":19,"56294903755188":19,"5632813721895218":23,"5647767543792725":19,"5649737119674683":19,"5690934658050537":19,"57":[21,23,24],"5704":23,"5707968175411224":23,"5716121435165404":19,"5728715658187866":19,"5729807138442993":19,"575617328286171":23,"5768787622451783":19,"5786747932434082":19,"58":[23,24],"580118":17,"5805662572383881":23,"582642912864685":19,"583":24,"5856661349534988":23,"586897850036621":19,"5892131805419922":19,"59":[23,24],"5909084677696228":23,"5914497375488281":19,"592941":21,"5959053039550781":19,"5960":22,"5960601553597429":24,"5962131917476654":23,"5999916791915894":19,"6":[13,18,19,20,22,23,24],"60":[22,23,24],"6043835878372192":19,"6046575754880905":23,"606808602809906":24,"6085857152938843":19,"609":23,"6099573820829391":23,"61":[22,23,24],"6116881370544434":19,"6120732029279072":19,"613209":24,"614564061164856":19,"6152833849191666":23,"618":24,"6183":22,"62":[21,23,24],"6205589026212692":23,"6230510274569194":19,"623629":24,"624":24,"6257634907960892":23,"63":[20,23,24],"6309126764535904":23,"631037":24,"633811":24,"638672":21,"6389893442392349":23,"64":[18,23,24],"643883":24,"6441417187452316":23,"64593":21,"647376":21,"6494599282741547":23,"65":[23,24],"650":22,"6548376232385635":23,"66":[21,23,24],"6601350754499435":23,"6652606427669525":23,"66666666666666":23,"666731":24,"67":[18,23,24],"6728083938360214":23,"6772531270980835":23,"68":[23,24],"6813623607158661":23,"6823285222053528":24,"682883":24,"6850549429655075":23,"6882499903440475":23,"689281":21,"69":[21,23,24],"6908893585205078":23,"6920":18,"69654655456543":24,"7":[17,18,19,20,21,22,23,24],"70":[23,24],"7026":22,"7052":22,"71":[23,24],"712138":24,"715254":24,"7169036865234375":24,"72":[23,24],"73":[23,24],"7302289009094238":24,"739093":24,"74":[23,24],"75":[23,24],"753":22,"756097":24,"76":[22,23,24],"7602126598358154":24,"7608699202537537":24,"767":22,"767875":24,"76853489507859":23,"76867159945164":23,"77":23,"771679":21,"773209":21,"7780":17,"7796856760978699":23,"78":23,"7833187580108643":24,"78845027":22,"79":[23,24],"791461":21,"792993":21,"798278":21,"8":[17,18,19,20,21,23,24],"80":[8,17,23],"800499":21,"80746268275371":23,"81":23,"8144":22,"8149038461538461":20,"8155415058135986":24,"817739":21,"82":[22,23,24],"83":[23,24],"838326":24,"84":[23,24],"8405624628067017":24,"846764":24,"849283":24,"85":23,"850229":24,"85aa80a4c":17,"86":[22,23],"862827":24,"863291":21,"87":[17,23],"877737":21,"88":23,"880":19,"886292":24,"89":[22,23],"9":[17,18,19,20,21,23,24],"90":23,"907417297363281":24,"909412":21,"91181":24,"92":22,"9203720092773438":24,"9236":22,"93":[22,24],"94":20,"9439":22,"95":[20,22,24],"950204":24,"954948":24,"960048":24,"966272":21,"97":[20,22],"970469":24,"976":22,"977876":24,"979":24,"98":[19,20,22],"980650":24,"989204":24,"99":[7,21],"9944201":22,"9991":24,"abstract":[6,7,8,13,14,17,19,21,22,24],"case":[0,7,8,17,18,19,20,21,22],"catch":21,"class":[0,2,6,7,8,10,11,14,15,17,18,19,20,21,22],"default":[5,7,8,9,10,13,14,15,18,19,20,22],"do":[2,4,7,13,17,18,19,20,21,23,24],"export":13,"final":[10,11,13,20,21,23,24],"float":[2,7,8,9,17,18,19,20,21,22],"function":[2,4,5,6,7,8,9,10,12,14,17,19,21,23],"import":[0,2,7,11,13,15,17,18,19,20,21,22,23,24],"int":[7,8,10,15,18,19,20,21,23,24],"long":[2,7,10,13,15],"new":[0,2,4,6,7,13,15,17,19,21,24],"null":[17,18,
20],"return":[0,4,5,6,7,8,9,10,11,15,17,18,19,20,21,23],"short":2,"static":[0,7],"super":[19,20],"switch":18,"throw":[17,20],"true":[5,7,8,10,15,17,18,20,21,22],"try":[7,17,18,19,20,21,23],"while":[5,13,18,19,21],A:[4,5,6,7,8,9,14,17,19,21,23],And:[17,19],As:[0,6,15,17,19,21,22,23,24],At:17,Be:13,Being:[13,19],By:[13,18,20],For:[0,2,5,6,7,8,9,10,11,13,14,15,17,18,19,20,21,22,24],If:[5,7,8,10,13,14,15,17,21,23,24],In:[6,13,17,18,19,20,21,22,23,24],Is:22,It:[0,4,10,13,17,23],Its:0,No:6,Not:18,OR:9,Of:6,On:7,Or:19,TO:[17,21],That:[17,20,22],The:[0,1,2,4,5,6,7,8,9,10,13,14,15,16,17,18,19,21,22,24],Then:[8,20,21],There:[7,11,15,17,18,21,22,23],These:[5,7,13,17,18,19,21],To:[7,10,13,16,17,18,19,22,24],Will:[19,23],With:[6,18],_:[20,23],__call__:[11,15,20],__class__:19,__init__:[11,17,18,19,20,21,23],__main__:19,__name__:19,__version__:19,_get_columns_to_clean:17,_n_dim:10,_remove_column:17,_setitem_single_block:23,a1:18,a2jdxxbl9a1e16341560437535849:23,a3:18,a4:18,a6:18,abid:13,abl:[7,18,19,22],about:[4,7,8,9,17,22],abov:[13,17,18,19,21],acc_stat:19,access:[0,17,18,21,22],accessor:19,accompani:10,accord:[6,8,14,17,18,19,22],accordingli:6,account:13,accstat:[0,17,19,20,21,23,24],accuraci:[0,7,11,19,20,23,24],accuracy_funct:[0,7,11,17,18,19,20,21,24],accuracy_histogram:7,accuracy_sampl:7,achiev:22,acquir:24,across:24,act:11,action:19,activ:23,actual:[17,19,23],ad:[0,13,19],add:[8,13,17,19,21],add_subplot:21,addcmul_:[19,23,24],addit:[0,7,8,9,10,13,15,17,18,19,22,23,24],addition:[0,7,22],additional_info:[7,22],address:[13,17],adjust:[6,17,19,20,21,23,24],adopt:[6,18],affect:[2,13],aforement:8,after:[7,10,11,14,15,17,18,19,21,24],afterward:22,ag:[20,24],aggreg:[11,24],agnost:0,agre:13,agreement:13,ahead:23,ai:[1,6,7,8,13,14,19,20,22],aim:[15,17,18],algorithm:[0,7,17,19,21],alia:[10,19,23],all:[0,2,6,7,8,9,10,11,13,15,17,18,19,20,21,22,23,24],all_mix:[7,17,21],all_predict:19,allow:[5,6,7,13,17,18,21],allow_incomplete_histori:[7,17,18,20,21,22],along:[17,21,22,23],alpha:7,alphabet:18,alreadi:[6,13,15,17,19,22],also:[0,6,10,13,17,19,20,21,22],altern:[2,7,13],although:[18,21],alwai:[15,18,21],amalg:6,among:[17,22,23],amount:[7,15,21,23],amp:[23,24],an:[0,2,4,5,6,7,8,9,10,13,15,17,18,19,20,21,22,23,24],analys:0,analysi:[4,6,7,10,13,14,16,17,18,20,21,23,24],analysis_block:[0,7,17,19,21],analyz:[0,7,8,17,18,19,20,21,22,23,24],analyze_data:[6,17,18,21],analyze_dataset:4,analyze_ensembl:[6,17,21],ani:[0,6,7,8,10,11,13,15,17,19,21,22,24],announc:13,anomali:[7,8,9,17,23],anomaly_cooldown:[0,7,17,21],anomaly_detect:[0,7,8,9,17,18,20,21,22],anomaly_error_r:[0,7,17,21],answer:21,anticip:7,anyon:13,anyth:[2,8],anywai:20,apart:[19,23],api:[2,4,5,6,9,13,17,18,19,20,21,22,23,24],appear:[17,21],append:[17,19,20,21],appl:18,appli:[6,8,11,17,18,21],applic:[7,22],appreci:23,approach:[2,5,6,7,10,13,14,17,18,21],appropri:[7,9,17,21],approv:13,approxim:21,aptli:17,ar:[0,2,5,6,7,8,9,10,11,13,14,15,17,18,19,20,21,22,23,24],arang:[19,22],arbitrari:19,architectur:[13,24],area:[19,22],aren:22,arg:[5,6,7,10,11,13,17,18,19,20,21,24],argument:[7,17,18,19,21,22,23],arnet:15,around:[6,13,21],arrai:[2,10,13,19,23],arrang:18,arrayencod:10,ask:[13,15,18,19,22],aspect:7,assign:[5,6,7,14,17,18,21,22],associ:[7,18,23],assum:[10,15,17,21],astyp:19,attribut:[6,7,10,11,15,18,19],audio:2,augment:[15,17],auto:[17,20],autoarima:15,autogener:[4,17,21],autom:[5,13,19,22],automat:[5,7,10,13,17,18,21,22,24],automl:[13,19,22],autopep8:13,avail:[2,5,7,8,10,15,22,23,24],averag:7,average_loss:23,avg_words_per_sent:[7,22],avoid:8,awai:17,awar:22,ax:[19,21
,22],axi:18,b69ac6792:17,back:[7,18],backbon:[10,15],balanced_accuracy_scor:[20,21],ballroom:17,banana:18,bar:22,barplot:22,base:[4,5,8,9,10,11,14,15,16,17,21,22,23],baseanalysisblock:[0,19],baseencod:[8,10,14,15,17,18,20,21],baseensembl:[11,17,19,21],basemix:[14,15,17,20,21,24],bashrc:13,basi:13,basic:[2,6,22],batch:10,batch_siz:10,bear:17,beauti:17,becaus:[15,21,23],becom:13,been:[6,10,15,17,18,19,20,21,22,23,24],befor:[13,17],begin:23,behavior:[8,14,18,19],behaviour:20,being:7,belong:19,below:[7,9,16,17,18,21],benchmark:[13,18,20,23],best:[6,7,11,17,18,19,20,21,23,24],bestof:[11,15,17,18,19,20,21,24],beta2:[19,23],better:[4,11],between:[7,13,19,20,22],bia:7,bias:22,biased_bucket:22,big:13,bin:21,binari:[2,8,20,21],binaryencod:[10,20,21],bit:[18,19,20,24],black:[18,24],blob:20,block:[0,7,8,9,17,21,22,24],boi:17,boilerpl:13,bool:[5,7,8,9,10,15,17,18,19],borrow:17,both:[7,10,15,18,19,22,23,24],bother:20,bound:[7,10,23],branch:[13,17],brief:14,broader:19,bucket:7,budget:[7,10,15],build:[5,16,24],bump:[17,21],bunch:22,c12129c31:17,c:19,ca:20,cach:8,calcul:19,calibr:[0,7,19],call:[0,4,5,7,8,13,14,18,19,20,22,24],callabl:[9,17],came:[17,19],can:[0,2,4,5,6,7,8,9,10,14,15,16,17,18,19,20,21,22,23,24],can_be_nan_numer:17,cannot:19,canva:17,capabl:24,capita:[19,22],car:18,card:21,care:8,carri:7,cast:[8,19,20],cat:18,categor:[2,6,7,8,10,14,17,18,19,20,22],categori:[7,13,18,19,21],categoricalautoencod:10,caveat:[21,23],cd:13,cell:19,cement:24,center:[19,20],certain:[5,17],challeng:17,chanc:13,chang:[6,7,13,17,18,21],charact:2,character:23,chat:13,check:[5,7,8,10,13,16,17,18,19,21,22,23],checkout:[13,18],choic:[8,10,18,19],chol:20,choos:[7,13,17,21],chosen:6,ci:13,circumst:15,cl:[10,14],classif:[6,11,15,20],classifi:[7,10],clean:[2,5,6,7,8,16,17,18,19,20,21,23,24],clean_data:[17,21],cleaned_data:[17,18,21],cleaner:[2,7,8,9,13,14,21],clear:8,clear_cach:8,clf:20,cli:13,clone:13,close:[7,13],cm:22,cmap:[19,22],coarseaggreg:24,code:[4,5,6,7,19,20,23,24],code_from_json_ai:[4,5,13,17,18,19,20,21,23],code_from_problem:4,codebas:13,col:[17,21,22],col_dtyp:5,col_index:21,col_nam:[5,17,18,21],colnam:22,color:[19,21,22,23],colum:22,column:[0,2,5,6,7,8,9,10,13,14,15,17,18,19,20,21,23,24],column_data:[10,18],column_import:7,column_nam:8,columnar:10,com:[13,18,19,20,22,23,24],combin:[7,8,13],come:[16,19,22,23,24],comfort:17,command:[13,17,18,21],comment:13,commit:13,common:8,commonli:23,compani:21,compar:[0,18,19,24],compare_data:18,compat:18,compil:[6,17,21],complet:[13,17,18,22],complex:[2,5,7,13,19],compli:15,compon:[7,21],compos:19,compstat:0,comput:[0,8,19],concat:[10,17,18,20,21],concatedencodedd:[8,17,20,21],concaten:[8,15],concatenated_train_dev:[17,21],concern:0,concret:24,concrete_strength:24,conduct:[7,17],confid:[0,7,19,20,23,24],confidence_norm:[0,17,21],confidenti:21,config:[1,7,13],confirm:[13,19],conform:[0,19],confus:[0,7,19],confusion_matrix:7,connect:15,consecut:7,consid:[2,7,17,18,19,21,22,23,24],consist:[7,10,18],constitu:10,constraint:[19,23],construct:[7,16,20],constructor:7,contain:[0,4,5,6,7,8,9,15,17,19,21,23],context:[7,8],continu:[13,17,19,21],control:18,conveni:[8,19,22],convert:[6,7,14,17,18],convieni:24,copi:[17,21,23],core:[0,8,13,14,17,19,23],corpu:17,corr:19,corrcoef:19,correct:[7,10],correctli:2,correl:19,correspond:[7,19,20],cotton:17,could:[17,20,21],count:[7,21,23],countri:[19,22],coupl:[18,23],cours:19,covari:[0,7],cover:17,cp:20,cpp:[19,23,24],cpu:[18,19,23],crash:7,creat:[4,5,6,7,8,13,14,16,19,20,22],creation:6,credit:21,creditcard:21,criteria:[5,7,21],crucial:[0,19,22],cryst
al:17,csrc:[19,23,24],csv:[13,17,18,19,20,21,22,23,24],cuda:[13,23,24],cudnn:13,current:[2,7,10,15,19],custom:[2,4,5,6,7,9,10,13,22,23,24],custom_clean:17,custom_cleaner_pipelin:17,custom_cleaning_funct:[8,9,17],custom_splitt:21,custom_splitter_pipelin:21,custom_token:10,custom_train:10,customiz:[19,22],cut:17,cutoff:23,cycl:[10,13],d:[4,13,17,18,20],dai:10,data:[0,1,4,5,6,7,10,11,13,14,15,19,20,22,24],data_dtyp:[7,9,17,18,20],data_fram:8,data_typ:10,data_window:10,dataanalysi:[4,7],dataclass:22,datafram:[0,4,6,7,8,9,13,17,18,19,20,21,23,24],datapoint:18,datasci:17,dataset:[0,5,6,7,8,10,13,14,15,16,17,18,19,20,21,23,24],datasourc:[8,24],datatyp:17,date:[2,13],datetim:[2,17],datetimeencod:10,datetimenormalizerencod:10,dateutil:17,datset:23,datum:10,dd1000b26:17,ddir:[17,21],deafultnet:15,deal:[2,19],debug:[10,19,23,24],decidedli:17,declar:13,decod:[10,18,20],decoded_predict:20,decompos:10,decor:17,dedic:18,deduct:[17,18,19,20,21,22,23,24],deep:13,deepcopi:17,def:[17,18,19,20,21],default_json:[17,21],defaultnet:[17,21],defin:[5,10,13,19,20,24],definit:[2,4,7,10,15,17,18,21,22,23],degre:22,delta:8,democrat:13,demonstr:17,dens:15,densiti:[19,22],dep:[0,17,21],depart:17,depend:[5,7,8,10,13,17,20,21],dependency_data:[10,17,21],deploi:[6,7],deprec:[19,23,24],describ:[7,10,18,19],descript:7,descriptor:[2,7],design:[16,18,19,22],detail:[4,5,7,8,10,13,17,18,19,21],detect:[7,8,9,17,18,21],detector:7,determin:[8,15],dev:[6,8,14,15,17,18,20,21,24],dev_data:[15,20],dev_priming_data:10,develop:[13,19,22],deviat:7,devic:[17,21],df:[4,13,17,18,19,20,21,22,23,24],df_std_dev:22,df_target_stddev:7,diagnos:20,diamond:17,dict:[0,4,6,7,8,9,10,15,17,18,19,20,21,23,24],dictionari:[0,4,6,7,8,10,13,15,17,18,19,21,24],diesel:18,differ:[0,7,13,17,19,22,23,24],difficult:21,dim1_data:10,dimens:18,dimension:[10,18],dinner:17,directli:[18,22],directori:[13,19],disabl:[8,13,15,23,24],disable_column_import:[0,17,21],discov:13,discret:[2,22],discuss:[13,19],diseas:20,distilbert:10,distinct:[7,22],distribut:[6,7,17,21,22],divers:21,doc:[21,23],docssrc:[17,21],document:[7,15,17,19,21,22,23],doe:[0,13,22,24],doesn:[5,6,13,19],domain:23,don:[13,20],done:[8,13,17,18,19,20,23,24],dot:19,download:[17,21],downstream:[6,17],dragonfruit:18,draw:13,drop:[17,18,19,20,21,23,24],ds:[7,13,20],dtype:[1,7,9,17,18,20,21,22],dtype_dict:[0,8,9,11,15,17,19,20,21],due:[15,21],duplic:8,dure:[0,5,10,13],dust:17,dynam:[0,7,10],e1:10,e2:10,e:[0,5,6,7,8,10,13,15,17,19,21,22,24],each:[5,6,7,8,9,10,11,13,14,15,17,18,19,21,22,23],earli:[15,20,24],early_stopping_round:[19,23],easi:[13,17,22],easier:[13,17,23],easiest:[4,17],easili:[19,22],edit:[2,4,5,17,20,21],effect:8,either:[0,7,8,9,10,14,18],element:[5,7,8,9,10,17,21],els:[10,17,18,21,22],embed:10,embed_mod:10,emit:[7,8,23],empti:2,enabl:[5,7,13,17,21,23,24],enc:18,enc_data:[6,17,21],enc_ft:18,enc_train_test:[17,21],encdata:18,encod:[1,5,6,7,8,13,14,15,16,17,19,20,21,22,23,24],encode_json:7,encode_on:10,encoded_d:[8,17,19,20,21],encoded_data:[0,10,17,18,21],encoded_dev_data:[17,21],encoded_ds_arr:8,encoded_new_data:[17,21],encoded_old_data:[17,21],encoded_represent:10,encoded_target_valu:[10,17,21],encoded_test_data:[17,21],encoded_train_data:[17,21],encoded_val_data:19,encoded_valu:10,encodedd:[8,15,17,19,20,21,24],encoder_prepping_dict:[17,21],encoder_typ:10,encompass:6,encount:10,encourag:2,end:[0,6,7,13,17,20,21],enforc:[17,18],engin:[2,7,17,18,19,21,23],engines:18,english:17,ensembl:[0,6,7,13,15,17,18,19,20,21,23,24],ensur:[6,7,15,17,21],enter:13,entir:[6,7,13,17,18,19,21,22,23],entiti:8,entri:13,e
ntropi:22,enumer:18,env:[13,23],epoch:[10,15,19,23,24],equal:[7,21],equival:[17,18,19,20,21,22,23,24],error:[7,17,20,21],essenti:[19,22],estim:[0,6,7,17,19,21,23],etc:[6,7,10,14,17,18,19,20,21],evalu:[0,6,7,11,17,21,23],even:22,event:[13,21],evergreen:17,everi:[0,6,7,8,15,19,24],everyth:[13,19,22,23],evolv:[17,21],ex:[6,7,13,14],exactli:[19,20],examin:18,exampl:[7,17,18,19,21,22,24],exang:20,exce:7,except:[6,13,17,19,20,21],excerpt:17,excit:13,exec:[17,21],exec_modul:[17,21],execut:[10,15,19],exhibit:22,exist:[2,6,13,15,17,20,21,23],exp_avg_sq:[19,23],expandus:[17,21],expect:[6,7,10,15,18,19,21],experi:18,experiment:[2,6,13,15],explain:[0,7,13,16,17,19,20,21,23,24],explainer_block:[0,17,21],explan:0,explicit:6,explicitli:[2,6,7,8,17,18,19,21],explor:[17,22],extend:[19,22],extra:21,extract:[0,7,8,17,21],f:[17,20,21,22,24],fact:[21,22],factor:22,fail:[7,15,20],fairli:[19,22,24],fall:23,fals:[7,10,15,17,18,20,21,22,23,24],familiar:[17,21],fashion:[0,10],fayr:17,fb:20,featur:[0,2,5,6,7,10,15,16,17,18,19,20,21,23,24],feature_a:7,feature_data:[17,21],feature_import:19,feedback:[10,13],feedback_hoop_funct:10,feel:[13,17],few:[8,13,17,18],field:[7,17,22],fig:[19,22],figsiz:[21,22,23],figur:[21,23],file:[2,4,5,6,7,10,13,17,18,19,20,21],file_nam:[17,21],file_path:6,filenam:[17,18,21],fill:[6,13,17,18,21],find:18,fine:[7,22],fineaggreg:24,finish:[17,18,19,20,21,22,23,24],fire:7,first:[4,8,13,17,18,19,20,21,22,24],fit:[6,7,10,15,17,19,20,21,23],fit_data_len:15,fit_on_al:[7,17,18,20,21,22],fit_on_dev:[15,17,18,20,21,24],fit_resampl:21,fix:[7,10,13],fixed_confid:[0,7,17,21],fixed_signific:[0,17,21],flag:[2,7,8,10,13,17,18],flake8:13,flake8path:13,flatten:19,flexibl:[7,19,22],floattensor:10,floor:17,flour:17,flyash:24,focu:[8,13,17,18,19],folder:[17,21],follow:[6,7,8,10,13,16,17,18,19,20,21,23,24],font:21,foramat:20,forc:7,forecast:[7,8,15],forest:[13,20],fork:13,form:[2,4,7,10,13,18],format:[2,7,8,13,19,20,23],formatt:[18,24],former:[0,19],found:[7,17,18,19,21,23,24],four:7,fp:[17,21],frac:[21,24],fraction:[6,8],frame:[0,6,8,18,21],framework:[7,13],fraud:21,free:13,freez:10,frequenc:7,friendli:20,from:[0,4,5,6,7,8,9,10,13,14,15,17,19,20,21,22,23,24],from_dict:[4,7,13,17,18,19,20,21,22,23,24],from_json:[7,17,21],frozen:10,ft:18,ft_data:18,fueltyp:18,fulfil:15,full:[6,13,19,21],fulli:[5,15,17,19,21,22],fun:19,further:[6,7,13,17,21,23,24],futur:[0,7,8],g:[0,7,8,10,13,15,19,22,24],game:17,garden:17,gather:17,gbm:[19,23],gdp:[19,22],gener:[0,2,4,5,6,7,8,9,10,11,13,14,15,19,20,22],generate_json_ai:5,get:[0,7,8,10,13,17,18,20,21,22,24],get_cleaning_func:[9,17],get_column_original_data:[8,18],get_encoded_column_data:[8,18],get_encoded_data:[8,17,21],get_next_count:10,getattr:22,gift:13,girl:17,git:13,github:[0,13,17,20,21],githubusercont:[13,18,19,20,22,23,24],give:[13,17,21,22,24],given:[5,6,7,8,10,15,17,18,19,21,22,23,24],glitter:17,global:[0,13],global_insight:[0,17,19,21],globalfeatureimport:[0,17,19,20,21,23,24],go:[13,17,18,19,20,21,24],goal:[0,7,13,17,18,19,21],goe:[17,21],gone:[23,24],good:[19,24],got:[19,20,23,24],gpl:13,gpu:[10,13,18,19,23],grad:[19,23],grad_scal:23,gradscal:[23,24],great:[13,17],greater:2,greatli:21,green:23,grei:23,ground:[17,21],group:[7,10],group_bi:[7,8,17,18,20,21,22],group_combin:8,grouped_bi:10,gt:[17,19,21,23],guarante:13,guid:18,guidelin:13,ha:[2,6,7,8,9,10,13,15,17,18,19,20,21,22,23,24],had:17,hand:[18,22,23],handl:[7,8,10,16,17,18,19,20,23],handle_unknown:10,has_pretrained_text_enc:19,hasattr:[17,21],hash:17,have:[6,7,8,13,17,18,19,20,21,22,23,24],hdi:[13,19,22],head:[17,18
,21,22],hear:13,heart:20,heart_diseas:20,heatmap:19,heavi:21,held:[7,19,24],help:6,helper:[8,10,13,17,18,19,21],henc:[7,17,21],here:[0,2,10,13,17,18,21,22],hidden:21,high:[7,19,22,23],high_level:[4,13,19,20,22,23,24],highli:[2,7,17,23],highlight:17,hillock:17,hist:[21,22],histogram:7,histor:[7,8,10],historical_column:[7,17,18,20,21,22],hold:6,home:[19,21],hood:7,hope:[17,21],horizon:7,host:6,hot:10,hou:17,how:[1,2,5,6,7,8,9,15,16,17,18,19,21,22,23,24],howev:[5,7,13,17,18,19,21],html:[0,21,23],http:[0,13,18,19,20,21,22,23,24],human:[19,22],hung:17,hybrid:18,hyper:[20,21],hyperparamet:15,i:[5,6,7,17,18,19,20],icicl:17,icp:[0,17,19,21,24],id:[7,13,17,24],idea:[13,19,24],ideal:[8,9,13,17,18,21],identifi:[6,7,8,9,14,17,18,19,20,21,22],idx:18,ignor:[7,17,21],ignore_featur:[7,17,18,20,21,22],ilabel_dict:18,iloc:[13,17,21,23],im:19,imag:[2,10],imagin:18,imbal:21,imbalanc:[6,22],imblearn:21,img2vecencod:10,iml_methods_limit:0,immedi:15,impact:24,implement:[2,10,13,15,17,18,21,23,24],impli:7,import_dir:[17,21],importlib:[17,21],impos:9,improv:[13,17,21,24],imshow:19,inact:[17,21],incent:13,includ:[2,7,8,15,17,18,19,21],include_target:[8,17,21],incom:7,incorpor:6,increas:[21,24],index:[13,18,19,21,22,23],indic:[2,7,8,19,21],individu:[17,21],induct:[0,19],industri:23,infant:[19,22],infer:[0,2,7,8,9,17,18,19,20,21,22],infer_typ:[7,22],info:[0,10,17,18,19,20,21,22,23,24],inform:[0,2,4,6,7,8,9,17,19,21],informat:7,ingredi:[17,22],inherit:[0,2,6,8,14,18,19],initi:[10,15,17,18,20,21],input:[0,2,6,7,8,10,13,15,18,20,21,23,24],input_col:[15,17,19,21],insid:[4,7,8,10,19,20],insight:[0,4,7,8,17,19,20,21,22,23,24],inspect:18,inspir:17,instal:23,instanc:[0,6,7,21,22],instanti:[6,7,13,17,18,21],instead:[7,13,17,19,20,21,23,24],instruct:[14,17,18,21],intak:[5,6,7,14,15,18,21],integ:[2,7,17,18,19,20,21,22],intend:[15,17,21],interact:6,interest:[6,7,13,14,17,21,23],interfac:[1,24],interior:17,intermedi:[19,20],intern:[6,7,10,11,15,17,19,21,22,23,24],interv:[8,23],introduc:[13,18],intuit:20,invalid:[2,7,8,9,17],invers:[7,10,18],invit:17,io:0,irregular:17,is_classif:19,is_multi_t:19,is_nn_encod:10,is_numer:19,is_prepar:[10,18],is_target:[5,10,17,18,20,21],is_target_predicting_encod:5,is_timeseri:[7,17,18,20,21,22],is_timeseries_encod:[10,18],is_trainable_encod:[10,17,18,21],isinst:[18,19],issu:[8,13],item:[17,18,21,22],iter:[0,19,23],itertool:21,its:[0,5,7,10,13,14,15,17,19,20,21,23,24],itself:[0,22],j:[6,17,19,21],jai:24,join:[13,17,21],journei:13,json:[1,7,8,13,14,19,20,22],json_ai:[4,5,13,17,18,19,20,21,22,23],json_ai_from_problem:[4,13,17,18,19,20,21,23,24],jsonai:[4,5,7,17,18,19,21],jupyt:[17,21],just:[18,19,21,22],k:[10,21],kaggl:[17,18,21],keep:[14,17,21,23,24],kei:[6,7,8,13,17,19,20,21,22,24],keyword:[17,21],kind:[7,10],know:[13,19,22,23],kwarg:[0,17,19,21],label:[2,11,13,15,18,19,21,23],label_dict:18,labeldict:18,laid:17,lambda:[17,18],landscap:17,languag:[2,10,14,16],laptop:13,larg:21,largest:10,last:[8,15],lastli:[17,21],later:[6,17,20,21,22],latest:13,latter:[0,19],layer:10,learn:[2,4,6,7,8,10,13,14,15,16,17,18,19,20,21,22,23,24],least:13,leav:[0,7,23],legend:23,len:[17,19,21,22,23,24],length:[6,8,10,15,17,18,19,20,21,23,24],less:[8,21],let:[13,17,18,19,20,21,22,23,24],level:[0,7,14,18,23],leverag:[19,22,23,24],lgbm:[19,23],li:17,lib:[19,23],librari:[19,22],licens:17,life:13,lightgbm:[15,17,18,19,20,21,23],lightgbmarrai:[15,23],lightli:17,lightweight:[17,19,21,22],lightwood:[1,2,4,6,8,10,11,15,18,21,23,24],lightwood_modul:[18,19,20],like:[7,8,17,18,19,21,23,24],limit:2,line:[17,18],link:[18,21],lint:13,linu
x:19,list:[7,8,10,11,17,18,19,21,22],literaci:[19,22],live:[13,17],ll:[17,18,19,20,21,23],lmu:0,load:[7,13,24],loader:[17,21],loc:21,local:[13,23],locat:[6,17,21],loco:0,log:[17,18,21],logger:10,logic:[8,10,11,15,19,24],look:[7,17,18,19,21,22,23,24],lookup_encod:5,loss:[19,23,24],lot:19,love:13,low:22,lower:[23,24],lt:[19,23],lw:[17,18,21],m:[13,20],machin:[2,7,13,15,17,19,21,22,23],machineri:[17,21],maco:19,made:[7,19],magnitud:10,mai:[0,2,5,7,8,10,13,17,18,21],main:[8,13,17,18,19,20,23,24],maintain:8,major:13,make:[0,7,13,15,16,17,18,20,21,22,23],malform:[8,9,17],mandatori:7,mani:[7,8,13,17,18,21,22,23],manner:[17,19],manual:[4,7,18,23],manufactur:23,map:[17,18,20,21,22],matplotlib:[19,21,22,23],matric:19,matrix:[0,7,19],matter:17,max:[10,13],max_depth:20,max_encoded_length:10,max_position_embed:10,maxim:7,maximum:7,mayb:10,mc:19,mdb:19,mean:[7,8,10,18,19],meanensembl:11,meaning:17,meant:20,measur:[7,23],medium:22,mention:[21,24],merg:[8,13],messag:20,method:[0,4,7,10,11,13,15,18,19,20,21,22,23,24],methodolog:[2,7,18],metric:[0,7,19,24],mi:[19,22],midnight:13,might:[10,13,18,19,22,23,24],mileag:18,mimic:21,mindsdb:[13,18,19,20,22,23,24],minim:[17,21],minimum:[7,21],minmax:10,minor:21,minut:23,miscellan:2,miss:[7,8,9,17,21],mission:13,mixer:[0,6,7,11,13,14,16,17,18,19,21,22,23,24],mixer_correl:19,mixer_nam:19,ml:[5,6,7,10,13,14,19,22],mod_nam:[17,21],mode:[8,9,10,13,17,21],modeensembl:11,model:[0,4,5,6,7,8,10,11,15,17,18,19,21,23],model_analysi:[0,17,21],model_analyz:[0,17,21],model_correl:19,model_nam:10,model_select:21,modelanalysi:[0,7],modelcorrelationheatmap:19,modif:[20,22,23,24],modifi:[0,4,8,17,18,19,22,24],modified_json:[17,21],modul:[1,7,8,10,18,19,20,23,24],modular:17,moduletyp:[17,21],mondai:13,monei:21,month:[10,13,23],monthli:[13,23],monthly_sunspot:23,more:[7,8,10,13,15,17,18,19,20,21],mortal:[19,22],most:[13,17,21,23],mostli:[18,21],motiv:13,move:[10,18],mpg:18,mr:17,much:[7,8,9,17,18,19,21,23],mul_:[19,23],multidimension:10,multihotencod:10,multimedia:13,multipl:[8,13,24],multivari:23,must:[2,6,7,8,10,18],mut_method_cal:[17,21],my_custom_heart_disease_predictor:20,my_dict:7,mycolumndata:18,mycustomclean:[17,21],mycustomsplitt:[17,21],myencod:7,myfeaturedata:18,mysplitt:21,n:[10,17,22],n_:18,n_column:6,n_output_dim:18,n_row:18,n_sampl:6,n_sentenc:10,n_ts_predict:15,naiv:8,name:[0,2,5,7,8,10,13,15,17,18,21,22,23],nan:17,natasha:[19,21],nation:22,natur:[6,19],nc:19,nclean:17,ndarrai:19,ndistribut:21,necessari:[5,6,7,17,19,21,22],necessarili:0,need:[0,6,7,10,13,18,20,22,23,24],neg:17,nembed_dim:10,net:[15,17,21],network:[10,15],neural:[10,15,17,18,19,20,21,23,24],never:[7,17],new_acc:24,new_data:[6,17,21],new_predict:24,newli:[19,24],newlin:13,newslett:13,next:[0,4,23],nice:[19,24],nltk:[17,21],nnew:24,non:[7,17,21],none:[0,4,6,7,8,9,10,11,15,17,18,20,21,22,23],noqa:[17,18,21],nor:22,normal:[0,8,10,14,22],normal_predict:19,north:17,notat:19,note:[0,7,8,13,15,17,18,19,21,23,24],notebook:[16,17,18,21,23],noth:[6,18],notic:[17,19,22],notion:23,nov:13,novemb:13,now:[6,17,18,19,20,21,22,23,24],np:[17,19,21,22],nr:15,nr_predict:[7,8,17,18,20,21,22,23],nr_row:[7,22],ns:19,ntarget:17,nuanc:[6,7],num_iter:[19,23],number:[2,7,10,13,17,18,19,21,23,24],numer:[2,6,7,8,10,14,17,18,21,22],numericencod:[10,17,18,20,21],numpi:[17,19,21,22],nvidia:13,nxk:10,obj:7,object:[0,4,5,6,7,8,9,10,11,13,14,15,18],oblig:17,observ:[7,8,15,21,22,23],obtain:[0,7,8],occur:[6,7],octob:13,off:[17,21],offer:[6,19,22,24],often:7,ok:[19,22],okai:[19,23],old:[6,17,21,24],old_acc:24,oldpeak:20,omit:[17,21],onc:[0,7,
13,15,17,18,19,20,21,23,24],one:[0,4,7,10,13,17,19,20,22,23,24],onehotencod:[10,18,20],ones:[18,22],onli:[5,7,8,9,11,13,15,17,18,19,20,21,22,24],onlin:[13,20],open:[8,13,17,21],oper:[17,19],opinion:13,oppos:[10,18],optim:20,option:[0,4,7,8,10,13,17,18,20,21],orang:18,order:[2,5,7,10,11,13,17,18,20,21,23],order_bi:[7,8,10,17,18,20,21,22,23],order_month:23,org:[21,23],organ:18,orig_data:18,origin:[8,10,15,17,18,20,21],original_dtyp:10,original_typ:[10,17,21],os:[17,21],other:[6,7,10,14,17,18,20,21,22,23],otherwis:[8,9,17],our:[13,17,18,19,21,22,23,24],out:[0,6,7,13,16,17,18,21,22,24],outlier:22,outlin:13,output:[0,6,7,8,10,11,14,15,18,19,20,23,24],output_s:[10,18],output_typ:[10,17],outsid:[17,23],over:[0,7,8,17],over_sampl:21,overal:19,overload:[19,23,24],overrid:[2,5,6,7,9,13,14,17,18],own:[2,5,7,14,18,19,20,22,23],pack:13,packag:[17,19,21,23],pain:23,pair:7,palac:17,palm:17,panda:[8,13,17,18,19,20,21,22,23,24],panel:7,parallel:[17,21],parallel_prepped_encod:[17,21],param:[0,7,10,17,18,19,21,23],paramet:[0,4,5,6,7,8,9,10,15,20,21,22,23],parent:19,pars:17,parse_dt:17,parser:17,part:[2,4,13],partial:[15,17,20,21],partial_fit:[15,17,20,21,24],particular:[6,17,18,19],particularli:5,pass:[0,5,7,10,13,21,22,23,24],past:23,path:[6,10,13,17,21],path_to:13,pato:23,pc:21,pca:21,pct_dev:[8,17,21],pct_invalid:[7,8,9,17,18,20,21,22],pct_test:[8,17],pct_train:[8,17,21],pd:[8,10,13,17,18,19,20,21,22,23,24],pdef:[13,18,19,20,23,24],peform:6,peopl:17,per:[5,7,8,10,11,14,15,18,19],percent:7,percentag:21,perform:[6,7,8,13,14,17,18,19,20,21,23,24],petrol:18,pfi:0,phase:[0,8,19,22],philosophi:13,pick:[18,19,20,23,24],pickl:[4,20],piec:[17,19,20],pip3:13,pip:[13,23],pipelin:[0,2,4,5,6,7,8,13,14,16,18,19,22,23],place:22,plant:17,player:17,pleas:[2,8,13,17,18,19,21],plot:[19,22,23],plt:[19,21,22,23],plu:[8,9,17],point:[0,7,8,10,13,15,18,19,23],pole:17,pop:[19,22],popul:[0,5,6,7,8,17,18,19,20,21,22,23,24],popular:13,posit:[7,17,21],positive_domain:[0,7,10,17,18,20,21,22],possibl:[5,7,13],post:[13,17,19,21],posterior:8,potenti:[0,7,21],powder:17,power:[13,17],pr:13,pre:[6,7,8,9,10,13,15,18,21],pred:13,pred_arg:[17,18,20,21,24],predict:[0,4,5,6,7,8,9,10,11,13,14,15,17,18,19,20,21,24],predict_proba:7,predictionargu:[6,7,17,20,21],predictor:[0,1,4,7,8,9,13,15,16,17,19,20,21,22],predictor_from_cod:[4,13,17,18,19,20,21,23],predictor_from_json_ai:[4,24],predictor_from_problem:4,predictor_from_st:4,predictorinterfac:[4,5,6,17,21,24],predit:15,predominantli:14,preexist:24,prefer:13,prep:[17,18,19,20,21,23,24],prepar:[5,6,7,10,11,13,17,18,19,20,21,23,24],preprocess:[2,6,7,13,18],preprocessor:16,prescrib:9,present:[10,17],preserv:2,pretrain:[7,10],pretrainedlangencod:[10,17],previou:[0,7,10,13,19],previous:[0,6,18,19,24],price:18,prime:[17,21],priming_data:[10,17,18,21],principl:21,print:[7,13,17,18,20,21,22,23,24],prior:7,privat:13,prize:[13,17],probabl:7,problem:[4,6,7,8,9,10,13,15,17,18,20,21,22,23],problem_definit:[4,5,7,13,17,18,19,20,21,22,23,24],problem_defint:5,problemdefinit:[4,5,7,13,17,18,19,20,21,22,23,24],proce:[17,21],procedur:[0,2,5,7,8,10,15,17,19,21,24],process:[2,5,6,7,8,9,10,15,18,19,20,21,22,23,24],processor:21,procid:10,produc:[10,14,19,22,23],product:21,programat:18,prohibit:5,project:13,projected_numb:10,properli:7,properti:[8,17,21],proport:8,propos:13,protocol:17,provid:[6,7,8,9,10,13,17,18,21,22,23],pst:13,pull:13,purpl:23,purpos:22,push:13,put:20,py:[17,19,20,21,23],pydata:[21,23],pylanc:13,pyplot:[19,21,22,23],pyright:13,python2:13,python3:[13,19,23],python:[7,13,18,22],python_arg_pars:[19,23,24],p
ythonpath:13,pytorch:[10,13,14,19],pytorch_rang:[19,23],q:[13,17],qualiti:[6,17,21],quantiti:[2,23],question:[13,19,21,22],quick:[16,22],quickli:[17,21],r2:24,r2_score:[17,18,24],r:[17,21],r_ok:[17,21],rais:[17,20,21],ran:22,random:[8,13,20,21],random_forest_mix:20,random_st:[21,24],randomforestclassifi:20,randomforestmix:20,randomli:8,rang:[19,22,23],ranger:[19,23],rare:21,rate:[7,19,23,24],rather:[4,11,15],ravel:21,raw:[4,5,6,7,8,9,10,13,17,18,19,20,22,23,24],rcparam:21,re:[7,10,17,18,19,20,21],reach:[6,13,17],read:[13,17,19,20,21],read_csv:[13,17,18,19,20,21,22,23,24],readabl:17,readi:[4,15,17,18,19,21,23],real:17,realli:22,reason:21,receiv:[10,13,19],recip:7,recommend:[7,13,17,21],reconstruct:10,recurr:10,ref:7,refer:[0,6,7,8,14,15,17,18,21,22],referenc:6,refit:0,regard:19,region:23,regress:[6,15,17,18,19,20,21],regular:13,relat:[8,9,17],releas:13,relev:[6,8,9,17,19,22],reli:18,remain:21,rememb:22,remot:13,remov:[7,8,17,21],repeat:8,repetit:13,replac:[13,17,18,20],repo:13,report:[0,7],repositori:13,repres:[2,7,8,10,13,14,18],represent:[5,6,7,8,9,10,14,15,16,17,18,19,20,21],reproduc:21,request:[6,13,15,17,19,20,21,23,24],requir:[0,2,5,6,7,13,15,17,18,19,21,24],research:13,reserv:[17,18,21],reset_index:21,reshap:23,residu:8,resolut:10,respect:[6,7,8,10,14,19,21],respond:13,rest:[0,7,8,22,23],restecg:20,result:[4,6],retail:23,retriev:[10,19],revert:18,rich:2,rich_text:[2,17],right:[2,19],rnn:10,rnnencod:10,roger:17,room:17,rotat:22,rough:24,roughli:0,round:[17,19,22,24],routin:6,row:[0,7,8,13,18,21,22,23,24],row_index:21,row_insight:[0,19],rtx:13,rtype:7,rug:17,rule:[10,11,14,16],rumpl:17,run:[0,5,7,13,15,18,20,23,24],runtim:0,runtime_analyz:[0,17,19,21],s:[4,6,7,8,9,14,15,17,18,19,20,21,23,24],sale:18,same:[4,6],sampl:[7,8,17,18,19,20,21,22,23,24],saniti:21,satisfi:18,save:[4,6,17,19,20,21],scale:10,scan:19,scenario:18,scene:17,scienc:[7,13,18],scientist:13,scikit:21,score:[0,11,13,15,17,24],scratch:[4,17,20,24],script:[17,18,19,21],search:[15,17,21,22],search_hyperparamet:[15,17,18,20,21,24],second:[7,10,19,23],seconds_per_encod:[7,17,18,20,21,22],seconds_per_mix:[7,17,18,20,21,22,24],section:13,see:[0,2,8,10,13,16,18,19,22,23,24],seed:[7,8,17,21],seed_nr:[7,17,18,20,21,22],seemingli:22,seen:[7,8],seismic:19,select:[7,15],selector:11,self:[6,10,17,18,19,20,21,23],sell:18,semest:23,send:13,sens:17,sensibl:10,sequenc:[2,10],sequenti:[0,2,10,19],seri:[2,7,8,10,13,15,16,17,18,21],set:[0,6,7,8,9,10,11,14,15,17,18,19,20,21,22,23,24],set_titl:[21,22],set_xlabel:21,set_xtick:[19,21,22],set_xticklabel:[19,21,22],set_ylabel:21,set_ylim:21,set_ytick:19,set_yticklabel:19,settingwithcopywarn:[21,23],setup:[17,19,21],setup_nn:10,sever:[2,6,7,13,17,19,21,22],sex:20,shall:[7,21],shape:[10,19,23,24],share:13,ship:13,short_text:[2,17],shorttextencod:10,should:[0,2,5,7,10,11,13,15,18,19,23,24],show:[13,17,18,19,22,23],shuffl:[8,21],sign:13,signal:10,signatur:[19,23,24],silent:17,similar:17,simpl:[7,8,10,16,18,19,21,22,23,24],simplenamespac:19,simpli:[17,21],simplic:18,simplif:6,sinc:[13,20],singl:[8,10,19,20,23,24],sinusoid:10,site:[19,23],situat:6,size:[7,10,18,21,22],skelet:6,skin:17,skip:20,sklearn:[20,21,24],sktime:15,slack:13,slag:24,slice:[21,23],slope:20,slower:[19,23],small:[2,7,17],smoothli:17,smote:21,smote_model:21,snow:17,snowsho:17,so:[0,2,7,13,17,18,19,20,21,22,23,24],softmax:0,solv:[13,20],some:[4,6,8,15,17,18,20,21,22,24],someon:20,someth:13,somewh:17,somewhat:[15,17,19,23],son:[6,17,21],soon:[16,17],sort:18,sourc:[0,2,4,5,6,7,8,9,10,11,13,15,17,20,21],sourcefileload:[17,21],south:17,space:
10,speak:0,special:13,specif:[2,4,5,7,8,10,13,14,15,17,18,21,23],specifi:[0,4,5,6,7,8,9,13,17,18,19,21,22,23],spectrum:[19,22],spend:7,split:[6,7,8,13,14,15,16,17,18,19,20,23,24],split_data:[6,17,18,21],splitter:[6,7,8,13,14,17,18],sprinkl:17,sq:[19,22],squeez:18,stabl:[15,17,19,21,22,23],stage:[0,13,24],stai:[13,17],stan:22,standalon:20,standard:[2,7,8,9,17,20],standard_error:17,start:[0,6,13,15,17,18,19,20,21,22,23,24],stat:[0,17,18,21],state:[8,15,23],state_fil:4,statement:10,statist:[4,6,7,14,17,18,19,20,21,23,24],statistical_analysi:[5,7,17,18,20,21,22],statisticalanalysi:[5,7,18,19,22],statisticalanalyz:22,stats_info:[0,17,19,21],statu:22,step:[2,6,7,10,13,14,17,18,21,23],still:[6,7],stop:[15,17,20,21,24],stop_aft:[10,15,17,18,20,21,24],stop_word:17,stopword:17,store:[0,6,7,8,19,22],str:[0,4,5,6,7,8,9,10,15,17,18,19,20,21],straightforward:19,strategi:[7,10,17,18,21],stratif:19,stratifi:[6,8,19,21],stratified_on:8,stream:8,strength:24,strewn:17,strict_mod:[7,17,18,20,21,22],strictli:[7,17],string:7,strip:17,structur:[6,10,19],studio:13,style:13,sub:[7,10],subject:6,submit:13,subplot:[19,22],subroutin:7,subsequ:[2,4,7,19,21],subset:[0,15,19,24],success:18,successfulli:[10,13],suggest:[7,13],suit:[13,18],sundai:13,sunspot:23,superplastic:24,supervis:[7,21],support:[2,7,10,11,13,14,15,16,18,21,23],supports_proba:[11,15,17,21],suppos:18,sure:[0,13,17,23],suspect:[7,14],swag:13,sy:[17,21],sync:13,syntax:[5,7,13,14,17,21],t:[5,6,8,13,19,20,21,22],tab10:22,tabular:16,tag:[2,13,23],tak:7,take:[7,8,9,10,15,17,18,19,20,22,23],taken:[7,21],target:[0,5,6,7,8,9,10,11,13,14,15,17,18,19,20,21,22,23,24],target_class_distribut:[7,10,20,21,22],target_dtyp:[0,17,21],target_encod:[15,17,20,21],target_nam:[0,17,21],target_norm:8,target_typ:[7,17,18,20,21,22],target_weight:[7,17,18,20,21,22],task:[7,8,10,11,13,15,17,19,20,24],tax:18,team:13,techniqu:[2,6,7,17,21],tell:[17,19,20],templat:[5,13],tempor:[2,7,10],tensor1:[19,23,24],tensor2:[19,23,24],tensor:[8,10,18,19,20,23,24],tensorflow:13,term:[13,17,22],test:[6,7,8,13,14,15,16,17,18,20,22,23,24],test_df:[13,24],test_sample_s:7,test_siz:21,text:[2,4,7,10,13,17,19,21],textrnnencod:10,thal:20,thalach:20,than:[4,8,11,15,19,21,23],thank:19,thankfulli:21,thei:[2,7,8,13,15,17,18,19,21],them:[0,6,8,13,17,19,20,22,23,24],theori:17,therefor:7,thi:[0,2,4,5,6,7,8,9,10,11,13,14,15,17,18,19,20,21,22,23,24],thing:[8,13,21,22],think:22,those:[10,13],though:[20,22],three:[7,17,19,21,24],through:[0,7,13,15,16,17,19,21,23,24],throughout:13,thu:[6,20],tick:19,tight_layout:21,tighter:7,time:[0,2,7,8,10,13,15,16,17,19,21],time_aim:[7,17,18,19,20,21,22,24],time_seri:8,timeseri:[7,8,9,16,17,23],timeseries_analyz:[7,8],timeseries_set:[0,7,8,9,15,17,18,20,21,22,23],timeseries_transform:7,timeseriesencod:10,timeseriesset:[7,8,9,15,17,19,21,22],timestamp:[10,23],timestep:[7,10,23],tip:13,titl:23,to_dict:[4,7],to_json:[7,13,17,18,20,21],to_replac:17,todo:[7,10],togeth:[7,11,13,17,20],token:[10,14],toler:7,tolist:[18,20],too:10,took:15,tool:[7,22],torch:[8,10,17,18,19,20,21,23,24],total:[13,15,17,18,19,20,21,22,23,24],track:13,train:[0,4,5,6,7,8,9,10,13,15,16,17,18,19,20,22],train_d:24,train_data:[0,15,17,19,20,21],train_dev_test:[17,21],train_df:24,train_observed_class:[7,22],train_priming_data:10,train_sample_s:7,train_siz:21,train_test_data:[17,21],train_test_split:21,trained_mix:[17,21],transact:21,transform:[7,8,10,14,18,23],transform_timeseri:8,transmiss:18,transpar:[17,21],travel:17,treat:[6,7,18],treatment:[2,7,10,17],trestbp:20,tri:[20,22],trick:13,trigger:[7,19,23,24],truli:1
3,truth:[20,23,24],ts_analysi:[0,10,11,15,17,18,20,21],ts_cfg:[0,17,19,21],ts_data_point:10,ts_naive_ma:8,ts_naive_residu:8,tsarraynumericencod:10,tsnumericencod:10,tss:[8,17,23],tuft:17,tune:[7,13,21],tupl:[0,17,19],turn:[5,8,9,17,18,20,21],tutori:[2,13,17,18,21,24],two:[10,11,15,18,19,20,21,22,24],txt:13,type:[0,1,4,5,6,8,9,10,13,14,15,17,18,19,20,21,22,23,24],type_inform:[5,7,22],typeinform:[5,7,22],ubiquit:23,ugli:[18,24],ultim:17,unabl:[18,24],unassign:10,unbias_target:[7,17,18,20,21,22],uncom:23,under:[7,13,15],underbalanc:21,underli:[8,11,17],undersampl:21,understand:[4,7,19,22],union:[4,7,17,18],uniqu:[13,18],unit:[10,13,15],unittest:13,univari:23,unix_timestamp:10,unknown:[2,5,7,18,21],unknown_token:10,unless:7,unload:4,unprocess:[6,13,17,21],unspport:19,unsqueez:18,unstabl:7,unsuit:20,unsupervis:7,until:6,untrain:6,unwant:17,up:[7,10,17,19,21,23],updat:[6,7,13,15,17,18,19,20,21,23],update_d:24,update_data:[17,21],update_df:24,upon:17,upper:[23,24],url:10,url_leg:17,us:[0,2,4,5,6,7,8,10,11,14,15,16,18,19,21,22,23,24],usag:[15,22,24],use_optuna:15,use_previous_target:[7,17,18,20,21,22],useabl:6,used_car_pric:18,user:[0,1,2,5,7,9,13,14,16,17,19,21,22,23],user_guid:[21,23],usernam:19,userwarn:[19,23],uses_target:[17,21],usual:[4,21],util:[8,19,23,24],v10:21,v11:21,v12:21,v13:21,v14:21,v15:21,v16:21,v17:21,v18:21,v19:21,v1:21,v20:21,v21:21,v22:21,v23:21,v24:21,v25:21,v26:21,v27:21,v28:21,v2:21,v3:21,v4:21,v5:21,v6:21,v7:21,v8:21,v9:21,v_i:21,va:19,valid:[0,5,7,11,13,15,17,19,20,21,23,24],validate_json_ai:5,valu:[0,2,5,6,7,8,10,13,15,17,18,19,21,22,23,24],valuabl:[22,23],values_for_nan_and_none_in_panda:17,vanilla:10,variabl:[0,17],variant:[0,10],varieti:[13,17,21],variou:[12,13,18],ve:[17,18,19,21],vector:[6,10,17,21],ver:21,veri:[13,17,19,22,23],version:[13,17,19,21,23],versu:[2,21,23],via:[4,6,7,10,13,14,18],video:2,view:[21,23],virtual:13,visual:[4,13,19],vocabulari:2,vocabularyencod:10,w:[10,17,19,21],wa:[8,17,21,22,23,24],wai:[2,4,17,18,20,22],walk:[16,17,21],wall:17,want:[2,4,8,10,13,17,18,19,20,21,22,23,24],warn:[17,19,21,23],wasn:8,water:24,we:[6,7,8,9,13,14,15,16,17,18,20,21,22,23,24],weak:[19,23],wear:17,websit:13,week:10,weekli:13,weight:7,weightedmeanensembl:11,welcom:2,well:[7,8,10,18,19,22],went:13,were:[8,17,18],what:[2,5,7,8,13,17,18,21,22,23],whatev:22,when:[0,4,7,10,17,19,21,22,24],where:[0,2,7,8,9,10,13,17,18,19,20,21,22],wherea:[14,18],whether:[5,7,8,10,11,15,18,19,21,22],which:[6,7,8,9,13,15,17,18,19,20,21,23],white:17,whiteboard:13,who:[13,17],wider:7,window:[7,8,10,17,18,20,21,22,23],winner:13,winter:17,wise:[0,10],wish:[7,13],within:[5,6,7,8,13,17,18,19,21],without:[7,10,13,18,19,24],won:21,wonder:17,word:[2,7,17],work:[11,13,15,17,18,19,21,23],workflow:13,worth:13,would:[4,17,18,22,24],wrap:[7,11,24],wrapper:[19,22],write:[4,13,17,21],x:[10,13,17,18,20,21,22],x_dev:21,x_test:21,x_train:21,xlabel:23,xtrain_mod:21,y:[20,21,22],y_dev:21,y_test:21,y_train:21,ydf:20,year:[10,13,18],yh:20,yield:[6,7,11,15,23],ylabel:23,you:[2,4,6,8,10,16,17,18,19,20,21,22,23,24],young:17,your:[1,2,4,6,7,14,19,20,22,23,24],ytrain_mod:21,zip:[17,21]},titles:["Analysis","API","Data Types (dtypes)","Encode your data","JSON-AI Config","JSON-AI Config","Predictor Interface","Lightwood API Types","Data","Data Cleaning","Encoders","Ensemble","Helpers","Welcome to Lightwood\u2019s Documentation!","Lightwood Philosophy","Mixers","Tutorials","Using your own pre-processing methods in Lightwood","Custom Encoder: Rule-Based","Tutorial - Implementing a custom analysis block in Lightwood","Tutorial 
- Implementing a custom mixer in Lightwood","Build your own training/testing split","Tutorial - Introduction to Lightwood\u2019s statistical analysis","Tutorial - Time series forecasting","Introduction"],titleterms:{"07":[17,21],"1":[17,18,19,20,21,22],"10":[17,21],"2":[17,18,19,20,21,22],"2021":[13,17,21],"3":[17,18,19,20,21,22],"4":[17,18,19,21],"5":[17,18,21],"6":[17,21],"case":13,"default":[17,21],"final":[19,22],The:20,ai:[4,5,16,17,18,21],amount:22,analysi:[0,19,22],api:[1,7],base:18,bia:22,block:19,bring:[13,16],bucket:22,bug:13,build:[14,17,21],byom:13,call:[17,21],can:13,clean:[9,14],cleaner:17,code:[13,17,18,21],column:22,commun:13,complex:16,conclus:[23,24],conduct:13,config:[4,5],content:1,contribut:13,contributor:13,creat:[17,18,21],current:13,custom:[16,17,18,19,20,21],data:[2,3,8,9,16,17,18,21,23],dataset:22,date:[17,21],defin:[22,23],dev:13,document:13,dtype:2,edit:18,encod:[3,10,18],engin:14,ensembl:11,environ:13,exampl:13,expos:19,featur:[13,14],figur:19,forecast:23,from:18,gener:[17,18,21,23],get:16,guid:13,hacktoberfest:13,help:13,helper:12,histogram:22,how:13,i:14,ii:14,iii:14,implement:[19,20],inform:22,initi:24,insid:22,instal:13,interfac:[6,20],introduc:[17,21],introduct:[19,20,22,23,24],json:[4,5,16,17,18,21],labelencod:18,licens:13,lightwood:[7,13,14,16,17,19,20,22],lightwood_modul:[17,21],link:13,load:[17,18,21,22,23],method:[16,17],miss:22,mixer:[15,20],ml:[17,21],model:[13,14,16,24],modul:[17,21],more:16,need:19,object:[17,19,20,21,22,23],other:13,our:20,out:19,output:[17,21],own:[13,16,17,21],particip:13,peek:22,per:22,philosophi:14,pipelin:[17,21],place:[17,21],pre:[14,17],predict:[22,23],predictor:[6,18,23,24],preprocess:[17,21],process:[13,14,17],python:[17,21],quick:13,report:13,repres:[17,21],review:13,rule:18,run:[16,17,19,21,22],s:[13,22],see:[17,21],seri:23,set:13,split:21,splitter:21,start:16,statist:22,step:[19,20,22],syntax:18,tabl:1,task:[22,23],test:[19,21],thought:22,time:23,train:[14,21,23,24],tutori:[16,19,20,22,23],type:[2,7,16],up:13,updat:24,us:[13,17,20],visual:23,vscode:13,we:19,welcom:13,what:19,write:20,you:13,your:[3,13,16,17,18,21]}}) \ No newline at end of file diff --git a/docs/tutorials.html b/docs/tutorials.html deleted file mode 100644 index 7a99f9fcb..000000000 --- a/docs/tutorials.html +++ /dev/null @@ -1,285 +0,0 @@ - - - - - - - - - - Tutorials — lightwood 1.6.1 documentation - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - -

Tutorials


Getting started with Lightwood and JSON-AI


The following tutorial will walk you through a simple tabular dataset with JSON-AI.

How to use Lightwood for your data (Coming Soon!)

Run models with more complex data types


Below, you can see how Lightwood handles language and time-series data.

Using Language Models (Coming Soon!)
Make your own timeseries predictor (Coming Soon!)

Bring your own custom methods


We support users bringing their custom methods. To learn how to build your own pipelines, check out the following notebooks:

\ No newline at end of file diff --git a/docs/tutorials/custom_cleaner/custom_cleaner.html b/docs/tutorials/custom_cleaner/custom_cleaner.html deleted file mode 100644 index 1adcf7341..000000000 --- a/docs/tutorials/custom_cleaner/custom_cleaner.html +++ /dev/null @@ -1,1608 +0,0 @@
Using your own pre-processing methods in Lightwood — lightwood 1.6.1 documentation

Using your own pre-processing methods in Lightwood


Date: 2021.10.07


In the notebook below, we explore how to make custom pre-processing methods for our data. Lightwood has standard cleaning protocols to handle a variety of different data types; however, we want users to feel comfortable augmenting them with their own changes. To do so, we'll highlight the approach we would take below:


We will use data from Kaggle.


The data has several columns, but the ultimate aim is to use the text to predict a readability score. There are also some columns that we do not want to use when making predictions, such as url_legal and license, among others.


In this tutorial, we're going to focus on making changes to 2 columns: (1) excerpt, a text column, from which we will remove stop words using NLTK, and (2) target, the value we want to predict, which we will make explicitly non-negative.


Note, for this ACTUAL challenge, negative and positive are meaningful. We are using this as an example dataset to demonstrate how you can make changes to your underlying dataset and proceed to building powerful predictors.
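To make the planned changes concrete before wiring them into Lightwood, here is a standalone pandas/NLTK sketch of the two transformations. It is illustrative only: clipping at zero is just one simple way to enforce non-negativity, and the proper cleaner module for this tutorial is built further below.

import nltk
import pandas as pd
from nltk.corpus import stopwords

nltk.download("stopwords")  # one-time download of the NLTK stop word list
stop_words = set(stopwords.words("english"))

df = pd.read_csv("data/train.csv.zip")  # the same file we load in the next section

# (1) excerpt: remove English stop words from the text column
df["excerpt"] = df["excerpt"].apply(
    lambda text: " ".join(w for w in str(text).split() if w.lower() not in stop_words)
)

# (2) target: make the value we want to predict explicitly non-negative
df["target"] = df["target"].clip(lower=0)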


Let’s get started!

[1]:
-import numpy as np
-import pandas as pd
-import torch
-import nltk
-
-import os
-import sys
-
-# Lightwood modules
-import lightwood as lw
-from lightwood import ProblemDefinition, \
-                      JsonAI, \
-                      json_ai_from_problem, \
-                      code_from_json_ai, \
-                      predictor_from_code

1) Load your data


Lightwood uses pandas to handle datasets, as this is a very standard package in data science. We can load our dataset using pandas in the following manner (make sure your data is in the data folder!):

[2]:
-# Load the data
-ddir = "data/"
-filename = os.path.join(ddir, "train.csv.zip")
-
-data = pd.read_csv(filename)
-data.head()
[2]:
   id         url_legal  license  excerpt                                             target     standard_error
0  c12129c31  NaN        NaN      When the young people returned to the ballroom...  -0.340259   0.464009
1  85aa80a4c  NaN        NaN      All through dinner time, Mrs. Fayre was somewh...  -0.315372   0.480805
2  b69ac6792  NaN        NaN      As Roger had predicted, the snow departed as q...  -0.580118   0.476676
3  dd1000b26  NaN        NaN      And outside before the palace a great garden w...  -1.054013   0.450007
4  37c1b32fb  NaN        NaN      Once upon a time there were Three Bears who li...   0.247197   0.510845

We see 6 columns of varying types: numerical (some with missing values), text, and identifiers or "ids". For our predictive task, we are only interested in 2 of these columns: excerpt and target.


2) Create a JSON-AI default object


Before we create a custom cleaner object, let’s first create JSON-AI syntax for our problem based on its specifications. We can do so by setting up a ProblemDefinition. The ProblemDefinition allows us to specify the target, the column we intend to predict, along with other details.
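As a small aside, the dictionary used in the code cell below can also be written as an explicit ProblemDefinition; this is a sketch of the equivalent form, assuming (as in recent Lightwood versions) that json_ai_from_problem accepts either a plain dictionary or a ProblemDefinition built via from_dict.

from lightwood import ProblemDefinition

# Hypothetical explicit equivalent of the problem_definition dictionary used below.
pdef = ProblemDefinition.from_dict({
    "target": "target",
    "ignore_features": ["url_legal", "license", "standard_error"],
})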


The end goal of JSON-AI is to provide a set of instructions on how to compile a machine learning pipeline.


In this case, let’s specify our target, the aptly named target column. We will also tell JSON-AI to throw away features we never intend to use, such as “url_legal”, “license”, and “standard_error”. We can do so in the following lines:

[3]:
-# Setup the problem definition
-problem_definition = {
-    'target': 'target',
-    "ignore_features": ["url_legal", "license", "standard_error"]
-}
-
-# Generate the j{ai}son syntax
-default_json = json_ai_from_problem(data, problem_definition)
-INFO:lightwood-50752:Dropping features: ['url_legal', 'license', 'standard_error']
-INFO:lightwood-50752:Analyzing a sample of 2478
-INFO:lightwood-50752:from a total population of 2834, this is equivalent to 87.4% of your data.
-INFO:lightwood-50752:Using 15 processes to deduct types.
-INFO:lightwood-50752:Infering type for: id
-INFO:lightwood-50752:Infering type for: target
-INFO:lightwood-50752:Infering type for: excerpt
-INFO:lightwood-50752:Column target has data type float
-INFO:lightwood-50752:Doing text detection for column: id
-INFO:lightwood-50752:Doing text detection for column: excerpt
-INFO:lightwood-50752:Column id has data type categorical
-WARNING:lightwood-50752:Column id is an identifier of type "Hash-like identifier"
-INFO:lightwood-50752:Starting statistical analysis
-INFO:lightwood-50752:Finished statistical analysis
-MyCustomCleaner.py
-MyCustomCleaner
-MyCustomSplitter.py
-MyCustomSplitter

Lightwood, as it processes the data, will provide the user a few pieces of information.

1. It drops the features we specify in the ignore_features argument
2. It takes a small sample of data from each column to automatically infer the data type
3. For each column that was not ignored, it identifies the most likely data type.
4. It notices that “ID” is a hash-like-identifier.
5. It conducts a small statistical analysis on the distributions in order to generate syntax.

As soon as you request a JSON-AI object, Lightwood automatically creates functional syntax from your data. You can see it as follows:

[4]:
-print(default_json.to_json())
-{
-    "features": {
-        "excerpt": {
-            "encoder": {
-                "module": "Rich_Text.PretrainedLangEncoder",
-                "args": {
-                    "output_type": "$dtype_dict[$target]",
-                    "stop_after": "$problem_definition.seconds_per_encoder"
-                }
-            }
-        }
-    },
-    "outputs": {
-        "target": {
-            "data_dtype": "float",
-            "encoder": {
-                "module": "Float.NumericEncoder",
-                "args": {
-                    "is_target": "True",
-                    "positive_domain": "$statistical_analysis.positive_domain"
-                }
-            },
-            "mixers": [
-                {
-                    "module": "Neural",
-                    "args": {
-                        "fit_on_dev": true,
-                        "stop_after": "$problem_definition.seconds_per_mixer",
-                        "search_hyperparameters": true
-                    }
-                },
-                {
-                    "module": "LightGBM",
-                    "args": {
-                        "stop_after": "$problem_definition.seconds_per_mixer",
-                        "fit_on_dev": true
-                    }
-                },
-                {
-                    "module": "Regression",
-                    "args": {
-                        "stop_after": "$problem_definition.seconds_per_mixer"
-                    }
-                }
-            ],
-            "ensemble": {
-                "module": "BestOf",
-                "args": {
-                    "args": "$pred_args",
-                    "accuracy_functions": "$accuracy_functions",
-                    "ts_analysis": null
-                }
-            }
-        }
-    },
-    "problem_definition": {
-        "target": "target",
-        "pct_invalid": 2,
-        "unbias_target": true,
-        "seconds_per_mixer": 1582,
-        "seconds_per_encoder": 12749,
-        "time_aim": 7780.458037514903,
-        "target_weights": null,
-        "positive_domain": false,
-        "timeseries_settings": {
-            "is_timeseries": false,
-            "order_by": null,
-            "window": null,
-            "group_by": null,
-            "use_previous_target": true,
-            "nr_predictions": null,
-            "historical_columns": null,
-            "target_type": "",
-            "allow_incomplete_history": false
-        },
-        "anomaly_detection": true,
-        "ignore_features": [
-            "url_legal",
-            "license",
-            "standard_error"
-        ],
-        "fit_on_all": true,
-        "strict_mode": true,
-        "seed_nr": 420
-    },
-    "identifiers": {
-        "id": "Hash-like identifier"
-    },
-    "accuracy_functions": [
-        "r2_score"
-    ]
-}
-
-
-

The above shows the minimal syntax required to create a functional JSON-AI object. For each feature you consider in the dataset, we specify the name of the feature, the type of encoder (feature-engineering method) to process the feature, and keyword arguments to pass to that encoder. For the output, we perform a similar operation, but specify the types of mixers, or algorithms, used in making a predictor that can estimate the target. Lastly, we populate the “problem_definition” key with the ingredients for our ML pipeline.

-

These are the only elements required to get off the ground with JSON-AI. However, we're interested in taking a custom approach. So, let's write this syntax to a file and introduce our own changes.

-
-
[5]:
-
-
-
-with open("default.json", "w") as fp:
-   fp.write(default_json.to_json())
-
-
-
-
-
-

3) Build your own cleaner module

-

Let’s make a file called MyCustomCleaner.py. To write this file, we will use lightwood.data.cleaner.cleaner as inspiration.

-

The goal of the cleaner is to pre-process your dataset - its output is simply a pandas DataFrame. In theory, any pre-processing can be done here. However, data can be highly irregular, so our default Cleaner function has several main goals:

-
  1. Strip away unwanted columns, such as identifiers
  2. Apply a cleaning function to each column in the dataset, according to that column’s data type
  3. Standardize NaN values within each column for appropriate downstream treatment

You can choose to omit many of these details and completely write this module from scratch, but the easiest way to introduce your custom changes is to borrow the Cleaner function, and add core changes in a custom block.

-

This can be done as follows:

-

You can see the individual cleaning functions in lightwood.data.cleaner. If you want to entirely replace the cleaning technique for a particular data type, we invite you to change lightwood.data.cleaner.get_cleaning_func using the argument custom_cleaning_functions; in this dictionary, for a datatype (specified in api.dtype), you can assign your own function to override our defaults (a short sketch of this appears after the full cleaner listing below).

-
import re
-from copy import deepcopy
-
-import numpy as np
-import pandas as pd
-
-# For time-series
-import datetime
-from dateutil.parser import parse as parse_dt
-
-from lightwood.api.dtype import dtype
-from lightwood.helpers import text
-from lightwood.helpers.log import log
-from lightwood.api.types import TimeseriesSettings
-from lightwood.helpers.numeric import can_be_nan_numeric
-
-# Import NLTK for stopwords
-import nltk
-from nltk.corpus import stopwords
-
-stop_words = set(stopwords.words("english"))
-
-from typing import Dict, List, Optional, Tuple, Callable, Union
-
-# Borrow functions from Lightwood's cleaner
-from lightwood.data.cleaner import (
-    _remove_columns,
-    _get_columns_to_clean,
-    get_cleaning_func,
-)
-
-# Use for standardizing NaNs
-VALUES_FOR_NAN_AND_NONE_IN_PANDAS = [np.nan, "nan", "NaN", "Nan", "None"]
-
-
-def cleaner(
-    data: pd.DataFrame,
-    dtype_dict: Dict[str, str],
-    identifiers: Dict[str, str],
-    target: str,
-    mode: str,
-    timeseries_settings: TimeseriesSettings,
-    anomaly_detection: bool,
-    custom_cleaning_functions: Dict[str, str] = {},
-) -> pd.DataFrame:
-    """
-    The cleaner is a function which takes in the raw data, plus additional information about its types and about the problem. Based on this, it generates a "clean" representation of the data, where each column has an ideal standardized type and all malformed, missing, or otherwise invalid elements are turned into ``None``
-
-    :param data: The raw data
-    :param dtype_dict: Type information for each column
-    :param identifiers: A dict containing all identifier typed columns
-    :param target: The target column
-    :param mode: Can be "predict" or "train"
-    :param timeseries_settings: Timeseries related settings, only relevant for timeseries predictors, otherwise can be the default object
-    :param anomaly_detection: Are we detecting anomalies with this predictor?
-
-    :returns: The cleaned data
-    """  # noqa
-
-    data = _remove_columns(
-        data,
-        identifiers,
-        target,
-        mode,
-        timeseries_settings,
-        anomaly_detection,
-        dtype_dict,
-    )
-
-    for col in _get_columns_to_clean(data, dtype_dict, mode, target):
-
-        log.info("Cleaning column =" + str(col))
-        # Get and apply a cleaning function for each data type
-        # If you want to customize the cleaner, you will likely want to modify ``get_cleaning_func``
-        data[col] = data[col].apply(
-            get_cleaning_func(dtype_dict[col], custom_cleaning_functions)
-        )
-
-        # ------------------------ #
-        # INTRODUCE YOUR CUSTOM BLOCK
-
-        # If column data type is a text type, remove stop-words
-        if dtype_dict[col] in (dtype.rich_text, dtype.short_text):
-            data[col] = data[col].apply(
-                lambda x: " ".join(
-                    [word for word in x.split() if word not in stop_words]
-                )
-            )
-
-        # Enforce numerical columns as non-negative
-        if dtype_dict[col] in (dtype.integer, dtype.float):
-            log.info("Converted " + str(col) + " into strictly non-negative")
-            data[col] = data[col].apply(lambda x: x if x > 0 else 0.0)
-
-        # ------------------------ #
-        data[col] = data[col].replace(
-            to_replace=VALUES_FOR_NAN_AND_NONE_IN_PANDAS, value=None
-        )
-
-    return data
-
-
-
-
-
-
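For illustration, here is a minimal sketch of what such an override could look like. The function name clip_to_unit_interval and the dictionary my_cleaning_overrides are hypothetical names chosen for this example, not part of Lightwood's API:

import lightwood  # assumes lightwood is installed, as in the rest of this tutorial
from lightwood.api.dtype import dtype


def clip_to_unit_interval(value):
    # Hypothetical per-cell rule for float columns: squash values into [0, 1]
    try:
        return min(max(float(value), 0.0), 1.0)
    except (TypeError, ValueError):
        return None


# Keys are data types from api.dtype; values are the per-cell callables
# intended to replace the defaults for those types.
my_cleaning_overrides = {dtype.float: clip_to_unit_interval}

You would then pass my_cleaning_overrides as the custom_cleaning_functions argument when calling the borrowed cleaner function shown above.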

Place your custom module in ~/lightwood_modules

-

We automatically search for custom scripts in your ~/lightwood_modules path. Place your file there. Later, when we autogenerate code, you'll see that you can change your import location if you choose.
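As a convenience, you can copy the file there from within the notebook. This is just a sketch; it assumes MyCustomCleaner.py sits in your current working directory:

import os
import shutil

# Create the directory Lightwood scans for custom scripts (if needed) and copy the module in.
modules_dir = os.path.expanduser("~/lightwood_modules")
os.makedirs(modules_dir, exist_ok=True)
shutil.copy("MyCustomCleaner.py", modules_dir)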

-
-

4) Introduce your custom cleaner in JSON-AI

-

Now let’s introduce our custom cleaner. JSON-AI keeps a lightweight syntax but fills in many default modules (like splitting, cleaning).

-

For the custom cleaner, we'll work by editing the “cleaner” key. We will change the properties within it as follows: (1) “module” - the name of the function; in our case, “MyCustomCleaner.cleaner”. (2) “args” - any keyword arguments specific to your cleaner's internals.

-

This will look as follows:

-
"cleaner": {
-    "module": "MyCustomCleaner.cleaner",
-    "args": {
-        "identifiers": "$identifiers",
-        "data": "data",
-        "dtype_dict": "$dtype_dict",
-        "target": "$target",
-        "mode": "$mode",
-        "timeseries_settings": "$problem_definition.timeseries_settings",
-        "anomaly_detection": "$problem_definition.anomaly_detection"
-    }
-}
-
-

You may be wondering what the “$” variables reference. In certain cases, we'd like JSON-AI to auto-fill internal variables when it automatically generates code. For example, we've already specified the “target”, so it is easier to simply refer to that term in a modular way; that is what these variables represent.

-

As we borrowed most of the default Cleaner, we keep these arguments. In theory, if we were writing these details from scratch, we could customize these values as necessary.
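If you prefer to make this edit programmatically rather than in a text editor, a minimal sketch could look like the following. It assumes the “cleaner” entry sits at the top level of the saved JSON-AI document, as the snippet above suggests; editing default.json by hand and saving it as custom.json works just as well:

import json

# Start from the generated default syntax...
with open("default.json", "r") as fp:
    json_ai_dict = json.load(fp)

# ...swap in our custom cleaner module...
json_ai_dict["cleaner"] = {
    "module": "MyCustomCleaner.cleaner",
    "args": {
        "identifiers": "$identifiers",
        "data": "data",
        "dtype_dict": "$dtype_dict",
        "target": "$target",
        "mode": "$mode",
        "timeseries_settings": "$problem_definition.timeseries_settings",
        "anomaly_detection": "$problem_definition.anomaly_detection",
    },
}

# ...and save the custom version for the next step.
with open("custom.json", "w") as fp:
    json.dump(json_ai_dict, fp, indent=4)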

-
-
-

5) Generate Python code representing your ML pipeline

-

Now we’re ready to load up our custom JSON-AI and generate the predictor code!

-

We can do this by first reading in our custom json-syntax, and then calling the function code_from_json_ai.

-
-
[6]:
-
-
-
-# Make changes to your JSON-file and load the custom version
-with open('custom.json', 'r') as fp:
-   modified_json = JsonAI.from_json(fp.read())
-
-# Generate Python code that fills in your pipeline
-code = code_from_json_ai(modified_json)
-
-print(code)
-
-# Save code to a file (Optional)
-with open('custom_cleaner_pipeline.py', 'w') as fp:
-    fp.write(code)
-
-
-
-
-
-
-
-
-MyCustomCleaner.py
-MyCustomCleaner
-MyCustomSplitter.py
-MyCustomSplitter
-import lightwood
-from lightwood.analysis import *
-from lightwood.api import *
-from lightwood.data import *
-from lightwood.encoder import *
-from lightwood.ensemble import *
-from lightwood.helpers.device import *
-from lightwood.helpers.general import *
-from lightwood.helpers.log import *
-from lightwood.helpers.numeric import *
-from lightwood.helpers.parallelism import *
-from lightwood.helpers.seed import *
-from lightwood.helpers.text import *
-from lightwood.helpers.torch import *
-from lightwood.mixer import *
-import pandas as pd
-from typing import Dict, List
-import os
-from types import ModuleType
-import importlib.machinery
-import sys
-
-
-for import_dir in [os.path.expanduser("~/lightwood_modules"), "/etc/lightwood_modules"]:
-    if os.path.exists(import_dir) and os.access(import_dir, os.R_OK):
-        for file_name in list(os.walk(import_dir))[0][2]:
-            print(file_name)
-            if file_name[-3:] != ".py":
-                continue
-            mod_name = file_name[:-3]
-            print(mod_name)
-            loader = importlib.machinery.SourceFileLoader(
-                mod_name, os.path.join(import_dir, file_name)
-            )
-            module = ModuleType(loader.name)
-            loader.exec_module(module)
-            sys.modules[mod_name] = module
-            exec(f"import {mod_name}")
-
-
-class Predictor(PredictorInterface):
-    target: str
-    mixers: List[BaseMixer]
-    encoders: Dict[str, BaseEncoder]
-    ensemble: BaseEnsemble
-    mode: str
-
-    def __init__(self):
-        seed(420)
-        self.target = "target"
-        self.mode = "inactive"
-        self.problem_definition = ProblemDefinition.from_dict(
-            {
-                "target": "target",
-                "pct_invalid": 2,
-                "unbias_target": True,
-                "seconds_per_mixer": 1582,
-                "seconds_per_encoder": 12749,
-                "time_aim": 7780.458037514903,
-                "target_weights": None,
-                "positive_domain": False,
-                "timeseries_settings": {
-                    "is_timeseries": False,
-                    "order_by": None,
-                    "window": None,
-                    "group_by": None,
-                    "use_previous_target": True,
-                    "nr_predictions": None,
-                    "historical_columns": None,
-                    "target_type": "",
-                    "allow_incomplete_history": False,
-                },
-                "anomaly_detection": True,
-                "ignore_features": ["url_legal", "license", "standard_error"],
-                "fit_on_all": True,
-                "strict_mode": True,
-                "seed_nr": 420,
-            }
-        )
-        self.accuracy_functions = ["r2_score"]
-        self.identifiers = {"id": "Hash-like identifier"}
-        self.dtype_dict = {"target": "float", "excerpt": "rich_text"}
-
-        # Any feature-column dependencies
-        self.dependencies = {"excerpt": []}
-
-        self.input_cols = ["excerpt"]
-
-        # Initial stats analysis
-        self.statistical_analysis = None
-
-    def analyze_data(self, data: pd.DataFrame) -> None:
-        # Perform a statistical analysis on the unprocessed data
-
-        log.info("Performing statistical analysis on data")
-        self.statistical_analysis = lightwood.data.statistical_analysis(
-            data,
-            self.dtype_dict,
-            {"id": "Hash-like identifier"},
-            self.problem_definition,
-        )
-
-        # Instantiate post-training evaluation
-        self.analysis_blocks = [
-            ICP(
-                fixed_significance=None,
-                confidence_normalizer=False,
-                positive_domain=self.statistical_analysis.positive_domain,
-            ),
-            AccStats(deps=["ICP"]),
-            GlobalFeatureImportance(disable_column_importance=False),
-        ]
-
-    def preprocess(self, data: pd.DataFrame) -> pd.DataFrame:
-        # Preprocess and clean data
-
-        log.info("Cleaning the data")
-        data = MyCustomCleaner.cleaner(
-            data=data,
-            identifiers=self.identifiers,
-            dtype_dict=self.dtype_dict,
-            target=self.target,
-            mode=self.mode,
-            timeseries_settings=self.problem_definition.timeseries_settings,
-            anomaly_detection=self.problem_definition.anomaly_detection,
-        )
-
-        # Time-series blocks
-
-        return data
-
-    def split(self, data: pd.DataFrame) -> Dict[str, pd.DataFrame]:
-        # Split the data into training/testing splits
-
-        log.info("Splitting the data into train/test")
-        train_test_data = splitter(
-            data=data,
-            seed=1,
-            pct_train=80,
-            pct_dev=10,
-            pct_test=10,
-            tss=self.problem_definition.timeseries_settings,
-            target=self.target,
-            dtype_dict=self.dtype_dict,
-        )
-
-        return train_test_data
-
-    def prepare(self, data: Dict[str, pd.DataFrame]) -> None:
-        # Prepare encoders to featurize data
-
-        self.mode = "train"
-
-        if self.statistical_analysis is None:
-            raise Exception("Please run analyze_data first")
-
-        # Column to encoder mapping
-        self.encoders = {
-            "target": Float.NumericEncoder(
-                is_target=True,
-                positive_domain=self.statistical_analysis.positive_domain,
-            ),
-            "excerpt": Rich_Text.PretrainedLangEncoder(
-                output_type=False,
-                stop_after=self.problem_definition.seconds_per_encoder,
-            ),
-        }
-
-        # Prepare the training + dev data
-        concatenated_train_dev = pd.concat([data["train"], data["dev"]])
-
-        log.info("Preparing the encoders")
-
-        encoder_prepping_dict = {}
-
-        # Prepare encoders that do not require learned strategies
-        for col_name, encoder in self.encoders.items():
-            if not encoder.is_trainable_encoder:
-                encoder_prepping_dict[col_name] = [
-                    encoder,
-                    concatenated_train_dev[col_name],
-                    "prepare",
-                ]
-                log.info(
-                    f"Encoder prepping dict length of: {len(encoder_prepping_dict)}"
-                )
-
-        # Setup parallelization
-        parallel_prepped_encoders = mut_method_call(encoder_prepping_dict)
-        for col_name, encoder in parallel_prepped_encoders.items():
-            self.encoders[col_name] = encoder
-
-        # Prepare the target
-        if self.target not in parallel_prepped_encoders:
-            if self.encoders[self.target].is_trainable_encoder:
-                self.encoders[self.target].prepare(
-                    data["train"][self.target], data["dev"][self.target]
-                )
-            else:
-                self.encoders[self.target].prepare(
-                    pd.concat([data["train"], data["dev"]])[self.target]
-                )
-
-        # Prepare any non-target encoders that are learned
-        for col_name, encoder in self.encoders.items():
-            if encoder.is_trainable_encoder:
-                priming_data = pd.concat([data["train"], data["dev"]])
-                kwargs = {}
-                if self.dependencies[col_name]:
-                    kwargs["dependency_data"] = {}
-                    for col in self.dependencies[col_name]:
-                        kwargs["dependency_data"][col] = {
-                            "original_type": self.dtype_dict[col],
-                            "data": priming_data[col],
-                        }
-
-                # If an encoder representation requires the target, provide priming data
-                if hasattr(encoder, "uses_target"):
-                    kwargs["encoded_target_values"] = parallel_prepped_encoders[
-                        self.target
-                    ].encode(priming_data[self.target])
-
-                encoder.prepare(
-                    data["train"][col_name], data["dev"][col_name], **kwargs
-                )
-
-    def featurize(self, split_data: Dict[str, pd.DataFrame]):
-        # Featurize data into numerical representations for models
-
-        log.info("Featurizing the data")
-        feature_data = {key: None for key in split_data.keys()}
-
-        for key, data in split_data.items():
-            feature_data[key] = EncodedDs(self.encoders, data, self.target)
-
-        return feature_data
-
-    def fit(self, enc_data: Dict[str, pd.DataFrame]) -> None:
-        # Fit predictors to estimate target
-
-        self.mode = "train"
-
-        # --------------- #
-        # Extract data
-        # --------------- #
-        # Extract the featurized data into train/dev/test
-        encoded_train_data = enc_data["train"]
-        encoded_dev_data = enc_data["dev"]
-        encoded_test_data = enc_data["test"]
-
-        log.info("Training the mixers")
-
-        # --------------- #
-        # Fit Models
-        # --------------- #
-        # Assign list of mixers
-        self.mixers = [
-            Neural(
-                fit_on_dev=True,
-                search_hyperparameters=True,
-                net="DefaultNet",
-                stop_after=self.problem_definition.seconds_per_mixer,
-                target_encoder=self.encoders[self.target],
-                target=self.target,
-                dtype_dict=self.dtype_dict,
-                input_cols=self.input_cols,
-                timeseries_settings=self.problem_definition.timeseries_settings,
-            ),
-            LightGBM(
-                fit_on_dev=True,
-                stop_after=self.problem_definition.seconds_per_mixer,
-                target=self.target,
-                dtype_dict=self.dtype_dict,
-                input_cols=self.input_cols,
-            ),
-            Regression(
-                stop_after=self.problem_definition.seconds_per_mixer,
-                target=self.target,
-                dtype_dict=self.dtype_dict,
-                target_encoder=self.encoders[self.target],
-            ),
-        ]
-
-        # Train mixers
-        trained_mixers = []
-        for mixer in self.mixers:
-            try:
-                mixer.fit(encoded_train_data, encoded_dev_data)
-                trained_mixers.append(mixer)
-            except Exception as e:
-                log.warning(f"Exception: {e} when training mixer: {mixer}")
-                if True and mixer.stable:
-                    raise e
-
-        # Update mixers to trained versions
-        self.mixers = trained_mixers
-
-        # --------------- #
-        # Create Ensembles
-        # --------------- #
-        log.info("Ensembling the mixer")
-        # Create an ensemble of mixers to identify best performing model
-        self.pred_args = PredictionArguments()
-        self.ensemble = BestOf(
-            ts_analysis=None,
-            data=encoded_test_data,
-            accuracy_functions=self.accuracy_functions,
-            target=self.target,
-            mixers=self.mixers,
-        )
-        self.supports_proba = self.ensemble.supports_proba
-
-    def analyze_ensemble(self, enc_data: Dict[str, pd.DataFrame]) -> None:
-        # Evaluate quality of fit for the ensemble of mixers
-
-        # --------------- #
-        # Extract data
-        # --------------- #
-        # Extract the featurized data into train/dev/test
-        encoded_train_data = enc_data["train"]
-        encoded_dev_data = enc_data["dev"]
-        encoded_test_data = enc_data["test"]
-
-        # --------------- #
-        # Analyze Ensembles
-        # --------------- #
-        log.info("Analyzing the ensemble of mixers")
-        self.model_analysis, self.runtime_analyzer = model_analyzer(
-            data=encoded_test_data,
-            train_data=encoded_train_data,
-            stats_info=self.statistical_analysis,
-            ts_cfg=self.problem_definition.timeseries_settings,
-            accuracy_functions=self.accuracy_functions,
-            predictor=self.ensemble,
-            target=self.target,
-            dtype_dict=self.dtype_dict,
-            analysis_blocks=self.analysis_blocks,
-        )
-
-    def learn(self, data: pd.DataFrame) -> None:
-        log.info(f"Dropping features: {self.problem_definition.ignore_features}")
-        data = data.drop(
-            columns=self.problem_definition.ignore_features, errors="ignore"
-        )
-
-        self.mode = "train"
-
-        # Perform stats analysis
-        self.analyze_data(data)
-
-        # Pre-process the data
-        clean_data = self.preprocess(data)
-
-        # Create train/test (dev) split
-        train_dev_test = self.split(clean_data)
-
-        # Prepare encoders
-        self.prepare(train_dev_test)
-
-        # Create feature vectors from data
-        enc_train_test = self.featurize(train_dev_test)
-
-        # Prepare mixers
-        self.fit(enc_train_test)
-
-        # Analyze the ensemble
-        self.analyze_ensemble(enc_train_test)
-
-        # ------------------------ #
-        # Enable model partial fit AFTER it is trained and evaluated for performance with the appropriate train/dev/test splits.
-        # This assumes the predictor could continuously evolve, hence including reserved testing data may improve predictions.
-        # SET `json_ai.problem_definition.fit_on_all=False` TO TURN THIS BLOCK OFF.
-
-        # Update the mixers with partial fit
-        if self.problem_definition.fit_on_all:
-
-            log.info("Adjustment on validation requested.")
-            update_data = {
-                "new": enc_train_test["test"],
-                "old": ConcatedEncodedDs(
-                    [enc_train_test["train"], enc_train_test["dev"]]
-                ),
-            }  # noqa
-
-            self.adjust(update_data)
-
-    def adjust(self, new_data: Dict[str, pd.DataFrame]) -> None:
-        # Update mixers with new information
-
-        self.mode = "train"
-
-        # --------------- #
-        # Extract data
-        # --------------- #
-        # Extract the featurized data
-        encoded_old_data = new_data["old"]
-        encoded_new_data = new_data["new"]
-
-        # --------------- #
-        # Adjust (Update) Mixers
-        # --------------- #
-        log.info("Updating the mixers")
-
-        for mixer in self.mixers:
-            mixer.partial_fit(encoded_new_data, encoded_old_data)
-
-    def predict(self, data: pd.DataFrame, args: Dict = {}) -> pd.DataFrame:
-
-        # Remove columns that user specifies to ignore
-        log.info(f"Dropping features: {self.problem_definition.ignore_features}")
-        data = data.drop(
-            columns=self.problem_definition.ignore_features, errors="ignore"
-        )
-        for col in self.input_cols:
-            if col not in data.columns:
-                data[col] = [None] * len(data)
-
-        # Clean the data
-        self.mode = "predict"
-        log.info("Cleaning the data")
-        data = MyCustomCleaner.cleaner(
-            data=data,
-            identifiers=self.identifiers,
-            dtype_dict=self.dtype_dict,
-            target=self.target,
-            mode=self.mode,
-            timeseries_settings=self.problem_definition.timeseries_settings,
-            anomaly_detection=self.problem_definition.anomaly_detection,
-        )
-
-        # Featurize the data
-        encoded_ds = EncodedDs(self.encoders, data, self.target)
-        encoded_data = encoded_ds.get_encoded_data(include_target=False)
-
-        self.pred_args = PredictionArguments.from_dict(args)
-        df = self.ensemble(encoded_ds, args=self.pred_args)
-
-        if self.pred_args.all_mixers:
-            return df
-        else:
-            insights, global_insights = explain(
-                data=data,
-                encoded_data=encoded_data,
-                predictions=df,
-                ts_analysis=None,
-                timeseries_settings=self.problem_definition.timeseries_settings,
-                positive_domain=self.statistical_analysis.positive_domain,
-                anomaly_detection=self.problem_definition.anomaly_detection,
-                analysis=self.runtime_analyzer,
-                target_name=self.target,
-                target_dtype=self.dtype_dict[self.target],
-                explainer_blocks=self.analysis_blocks,
-                fixed_confidence=self.pred_args.fixed_confidence,
-                anomaly_error_rate=self.pred_args.anomaly_error_rate,
-                anomaly_cooldown=self.pred_args.anomaly_cooldown,
-            )
-            return insights
-
-
-
-

As you can see, an end-to-end pipeline of our entire ML procedure has been generated. The pipeline is broken into several abstracted functions to make it transparent which processes your data goes through in order to build these models.

-

The key steps of the pipeline are as follows:

  1. Run a statistical analysis with analyze_data
  2. Clean your data with preprocess
  3. Make a training/dev/testing split with split
  4. Prepare your feature-engineering pipelines with prepare
  5. Create your features with featurize
  6. Fit your predictor models with fit

You can customize this further if needed, but the above covers all the steps necessary to train a model!

-

We recommend familiarizing yourself with these steps by calling the above commands, ideally in order. Some commands (namely prepare, featurize, and fit) depend on earlier steps; a short sketch of running them in sequence follows.
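The sketch below assumes predictor is the object created with predictor_from_code(code) in the next section and data is the DataFrame loaded at the start of this tutorial; treat it as illustrative rather than required:

predictor.analyze_data(data)               # statistical analysis of the raw data
cleaned_data = predictor.preprocess(data)  # runs MyCustomCleaner.cleaner
splits = predictor.split(cleaned_data)     # train/dev/test split
predictor.prepare(splits)                  # prepare the encoders
enc_data = predictor.featurize(splits)     # encode features for the mixers
predictor.fit(enc_data)                    # train the mixers and build the ensemble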

-

If you want to skip the individual steps, we recommend you simply call the learn method, which runs all the necessary steps to give you a fully trained predictive model, starting from unprocessed data!

-
-
-

6) Call Python to run your code and see your preprocessed outputs

-

Once we have code, we can turn this into a python object by calling predictor_from_code. This instantiates the PredictorInterface object.

-

This predictor object can then be used to run your pipeline.

-
-
[7]:
-
-
-
-# Turn the code above into a predictor object
-predictor = predictor_from_code(code)
-
-
-
-
-
-
-
-
-MyCustomCleaner.py
-MyCustomCleaner
-MyCustomSplitter.py
-MyCustomSplitter
-
-
-
-
[8]:
-
-
-
-# Pre-process the data
-cleaned_data = predictor.preprocess(data)
-
-cleaned_data.head()
-
-
-
-
-
-
-
-
-INFO:lightwood-50752:Cleaning the data
-INFO:lightwood-50752:Cleaning column =target
-INFO:lightwood-50752:Converted target into strictly non-negative
-INFO:lightwood-50752:Cleaning column =excerpt
-
-
-
-
[8]:
-
-
-
-
   excerpt                                             target
0  When young people returned ballroom, presented...  0.000000
1  All dinner time, Mrs. Fayre somewhat silent, e...  0.000000
2  As Roger predicted, snow departed quickly came...  0.000000
3  And outside palace great garden walled round, ...  0.000000
4  Once upon time Three Bears lived together hous...  0.247197
-
-
-
-
[9]:
-
-
-
-print("\033[1m"  + "Original Data\n" + "\033[0m")
-print("Excerpt:\n", data.iloc[0]["excerpt"])
-print("\nTarget:\n", data.iloc[0]["target"])
-
-print("\033[1m"  + "\n\nCleaned Data\n" + "\033[0m")
-print("Excerpt:\n", cleaned_data.iloc[0]["excerpt"])
-print("\nTarget:\n", cleaned_data.iloc[0]["target"])
-
-
-
-
-
-
-
-
-Original Data
-
-Excerpt:
- When the young people returned to the ballroom, it presented a decidedly changed appearance. Instead of an interior scene, it was a winter landscape.
-The floor was covered with snow-white canvas, not laid on smoothly, but rumpled over bumps and hillocks, like a real snow field. The numerous palms and evergreens that had decorated the room, were powdered with flour and strewn with tufts of cotton, like snow. Also diamond dust had been lightly sprinkled on them, and glittering crystal icicles hung from the branches.
-At each end of the room, on the wall, hung a beautiful bear-skin rug.
-These rugs were for prizes, one for the girls and one for the boys. And this was the game.
-The girls were gathered at one end of the room and the boys at the other, and one end was called the North Pole, and the other the South Pole. Each player was given a small flag which they were to plant on reaching the Pole.
-This would have been an easy matter, but each traveller was obliged to wear snowshoes.
-
-Target:
- -0.340259125
-
-
-Cleaned Data
-
-Excerpt:
- When young people returned ballroom, presented decidedly changed appearance. Instead interior scene, winter landscape. The floor covered snow-white canvas, laid smoothly, rumpled bumps hillocks, like real snow field. The numerous palms evergreens decorated room, powdered flour strewn tufts cotton, like snow. Also diamond dust lightly sprinkled them, glittering crystal icicles hung branches. At end room, wall, hung beautiful bear-skin rug. These rugs prizes, one girls one boys. And game. The girls gathered one end room boys other, one end called North Pole, South Pole. Each player given small flag plant reaching Pole. This would easy matter, traveller obliged wear snowshoes.
-
-Target:
- 0.0
-
-
-

As you can see, the cleaning process we introduced removed the stop-words from the excerpt and forced the target data to be non-negative.
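If you'd like to verify this directly, here is a small optional check (a sketch; it assumes cleaned_data is the DataFrame returned by predictor.preprocess(data) above):

from nltk.corpus import stopwords

stop_words = set(stopwords.words("english"))

# The custom block clipped the target at zero...
assert (cleaned_data["target"] >= 0).all()

# ...and removed exact (case-sensitive) stop-word matches from the text.
leftover = [w for w in cleaned_data.iloc[0]["excerpt"].split() if w in stop_words]
print(leftover)  # expected to be an empty list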

-

We hope this tutorial was informative on how to introduce a custom preprocessing method to your datasets! For more customization tutorials, please check our documentation.

-

If you want to download the Jupyter-notebook version of this tutorial, check out the source GitHub location found here: lightwood/docssrc/source/tutorials/custom_cleaner.

-
-
-
- - - - - - - - - - - \ No newline at end of file diff --git a/docs/tutorials/custom_cleaner/custom_cleaner.ipynb b/docs/tutorials/custom_cleaner/custom_cleaner.ipynb deleted file mode 100644 index 93e1d01ca..000000000 --- a/docs/tutorials/custom_cleaner/custom_cleaner.ipynb +++ /dev/null @@ -1,1290 +0,0 @@ -{ - "cells": [ - { - "cell_type": "markdown", - "id": "regulated-manufacturer", - "metadata": {}, - "source": [ - "## Using your own pre-processing methods in Lightwood\n", - "\n", - "#### Date: 2021.10.07\n", - "\n", - "For the notebook below, we'll be exploring how to make **custom pre-processing** methods for our data. Lightwood has standard cleaning protocols to handle a variety of different data types, however, we want users to feel comfortable augmenting and addressing their own changes. To do so, we'll highlight the approach we would take below:\n", - "\n", - "\n", - "We will use data from [Kaggle](https://www.kaggle.com/c/commonlitreadabilityprize/data?select=train.csv). \n", - "\n", - "The data has several columns, but ultimately aims to use text to predict a *readability score*. There are also some columns that I do not want to use when making predictions, such as `url_legal`, `license`, among others.\n", - "\n", - "In this tutorial, we're going to focus on making changes to 2 columns: \n", - "(1) **excerpt**, a text column, and ensuring we remove stop words using NLTK.
\n", - "(2) **target**, the goal to predict; we will make this explicitly non-negative.\n", - "\n", - "Note, for this ACTUAL challenge, negative and positive are meaningful. We are using this as an example dataset to demonstrate how you can make changes to your underlying dataset and proceed to building powerful predictors.\n", - "\n", - "Let's get started!" - ] - }, - { - "cell_type": "code", - "execution_count": 1, - "id": "happy-wheat", - "metadata": {}, - "outputs": [], - "source": [ - "import numpy as np\n", - "import pandas as pd\n", - "import torch\n", - "import nltk\n", - "\n", - "import os\n", - "import sys\n", - "\n", - "# Lightwood modules\n", - "import lightwood as lw\n", - "from lightwood import ProblemDefinition, \\\n", - " JsonAI, \\\n", - " json_ai_from_problem, \\\n", - " code_from_json_ai, \\\n", - " predictor_from_code" - ] - }, - { - "cell_type": "markdown", - "id": "indie-chaos", - "metadata": {}, - "source": [ - "### 1) Load your data\n", - "\n", - "Lightwood uses `pandas` in order to handle datasets, as this is a very standard package in datascience. We can load our dataset using pandas in the following manner (make sure your data is in the data folder!)" - ] - }, - { - "cell_type": "code", - "execution_count": 2, - "id": "recognized-parish", - "metadata": {}, - "outputs": [ - { - "data": { - "text/html": [ - "
\n", - "\n", - "\n", - " \n", - " \n", - " \n", - " \n", - " \n", - " \n", - " \n", - " \n", - " \n", - " \n", - " \n", - " \n", - " \n", - " \n", - " \n", - " \n", - " \n", - " \n", - " \n", - " \n", - " \n", - " \n", - " \n", - " \n", - " \n", - " \n", - " \n", - " \n", - " \n", - " \n", - " \n", - " \n", - " \n", - " \n", - " \n", - " \n", - " \n", - " \n", - " \n", - " \n", - " \n", - " \n", - " \n", - " \n", - " \n", - " \n", - " \n", - " \n", - " \n", - " \n", - " \n", - " \n", - " \n", - " \n", - " \n", - " \n", - " \n", - " \n", - "
idurl_legallicenseexcerpttargetstandard_error
0c12129c31NaNNaNWhen the young people returned to the ballroom...-0.3402590.464009
185aa80a4cNaNNaNAll through dinner time, Mrs. Fayre was somewh...-0.3153720.480805
2b69ac6792NaNNaNAs Roger had predicted, the snow departed as q...-0.5801180.476676
3dd1000b26NaNNaNAnd outside before the palace a great garden w...-1.0540130.450007
437c1b32fbNaNNaNOnce upon a time there were Three Bears who li...0.2471970.510845
\n", - "
" - ], - "text/plain": [ - " id url_legal license \\\n", - "0 c12129c31 NaN NaN \n", - "1 85aa80a4c NaN NaN \n", - "2 b69ac6792 NaN NaN \n", - "3 dd1000b26 NaN NaN \n", - "4 37c1b32fb NaN NaN \n", - "\n", - " excerpt target standard_error \n", - "0 When the young people returned to the ballroom... -0.340259 0.464009 \n", - "1 All through dinner time, Mrs. Fayre was somewh... -0.315372 0.480805 \n", - "2 As Roger had predicted, the snow departed as q... -0.580118 0.476676 \n", - "3 And outside before the palace a great garden w... -1.054013 0.450007 \n", - "4 Once upon a time there were Three Bears who li... 0.247197 0.510845 " - ] - }, - "execution_count": 2, - "metadata": {}, - "output_type": "execute_result" - } - ], - "source": [ - "# Load the data\n", - "ddir = \"data/\"\n", - "filename = os.path.join(ddir, \"train.csv.zip\")\n", - "\n", - "data = pd.read_csv(filename)\n", - "data.head()" - ] - }, - { - "cell_type": "markdown", - "id": "official-wright", - "metadata": {}, - "source": [ - "We see **6 columns**, a variety which are numerical, missing numbers, text, and identifiers or \"ids\". For our predictive task, we are only interested in 2 such columns, the **excerpt** and **target** columns.\n", - "\n", - "### 2) Create a JSON-AI default object\n", - "Before we create a custom cleaner object, let's first create JSON-AI syntax for our problem based on its specifications. We can do so by setting up a ``ProblemDefinition``. The ``ProblemDefinition`` allows us to specify the target, the column we intend to predict, along with other details. \n", - "\n", - "The end goal of JSON-AI is to provide **a set of instructions on how to compile a machine learning pipeline*.\n", - "\n", - "In this case, let's specify our target, the aptly named **target** column. We will also tell JSON-AI to throw away features we never intend to use, such as \"url_legal\", \"license\", and \"standard_error\". 
We can do so in the following lines:" - ] - }, - { - "cell_type": "code", - "execution_count": 3, - "id": "chicken-truth", - "metadata": {}, - "outputs": [ - { - "name": "stderr", - "output_type": "stream", - "text": [ - "INFO:lightwood-50752:Dropping features: ['url_legal', 'license', 'standard_error']\n", - "INFO:lightwood-50752:Analyzing a sample of 2478\n", - "INFO:lightwood-50752:from a total population of 2834, this is equivalent to 87.4% of your data.\n", - "INFO:lightwood-50752:Using 15 processes to deduct types.\n", - "INFO:lightwood-50752:Infering type for: id\n", - "INFO:lightwood-50752:Infering type for: target\n", - "INFO:lightwood-50752:Infering type for: excerpt\n", - "INFO:lightwood-50752:Column target has data type float\n", - "INFO:lightwood-50752:Doing text detection for column: id\n", - "INFO:lightwood-50752:Doing text detection for column: excerpt\n", - "INFO:lightwood-50752:Column id has data type categorical\n", - "WARNING:lightwood-50752:Column id is an identifier of type \"Hash-like identifier\"\n", - "INFO:lightwood-50752:Starting statistical analysis\n", - "INFO:lightwood-50752:Finished statistical analysis\n" - ] - }, - { - "name": "stdout", - "output_type": "stream", - "text": [ - "MyCustomCleaner.py\n", - "MyCustomCleaner\n", - "MyCustomSplitter.py\n", - "MyCustomSplitter\n" - ] - } - ], - "source": [ - "# Setup the problem definition\n", - "problem_definition = {\n", - " 'target': 'target',\n", - " \"ignore_features\": [\"url_legal\", \"license\", \"standard_error\"]\n", - "}\n", - "\n", - "# Generate the j{ai}son syntax\n", - "default_json = json_ai_from_problem(data, problem_definition)\n" - ] - }, - { - "cell_type": "markdown", - "id": "needed-flashing", - "metadata": {}, - "source": [ - "Lightwood, as it processes the data, will provide the user a few pieces of information.\n", - "\n", - "(1) It drops the features we specify in the `ignore_features` argument
\n", - "(2) It takes a small sample of data from each column to *automatically infer the data type*
\n", - "(3) For each column that was not ignored, it identifies the most likely data type.
\n", - "(4) It notices that \"ID\" is a hash-like-identifier.
\n", - "(5) It conducts a small statistical analysis on the distributions in order to generate syntax.
\n", - "\n", - "As soon as you request a JSON-AI object, Lightwood automatically creates functional syntax from your data. You can see it as follows: " - ] - }, - { - "cell_type": "code", - "execution_count": 4, - "id": "designed-condition", - "metadata": {}, - "outputs": [ - { - "name": "stdout", - "output_type": "stream", - "text": [ - "{\n", - " \"features\": {\n", - " \"excerpt\": {\n", - " \"encoder\": {\n", - " \"module\": \"Rich_Text.PretrainedLangEncoder\",\n", - " \"args\": {\n", - " \"output_type\": \"$dtype_dict[$target]\",\n", - " \"stop_after\": \"$problem_definition.seconds_per_encoder\"\n", - " }\n", - " }\n", - " }\n", - " },\n", - " \"outputs\": {\n", - " \"target\": {\n", - " \"data_dtype\": \"float\",\n", - " \"encoder\": {\n", - " \"module\": \"Float.NumericEncoder\",\n", - " \"args\": {\n", - " \"is_target\": \"True\",\n", - " \"positive_domain\": \"$statistical_analysis.positive_domain\"\n", - " }\n", - " },\n", - " \"mixers\": [\n", - " {\n", - " \"module\": \"Neural\",\n", - " \"args\": {\n", - " \"fit_on_dev\": true,\n", - " \"stop_after\": \"$problem_definition.seconds_per_mixer\",\n", - " \"search_hyperparameters\": true\n", - " }\n", - " },\n", - " {\n", - " \"module\": \"LightGBM\",\n", - " \"args\": {\n", - " \"stop_after\": \"$problem_definition.seconds_per_mixer\",\n", - " \"fit_on_dev\": true\n", - " }\n", - " },\n", - " {\n", - " \"module\": \"Regression\",\n", - " \"args\": {\n", - " \"stop_after\": \"$problem_definition.seconds_per_mixer\"\n", - " }\n", - " }\n", - " ],\n", - " \"ensemble\": {\n", - " \"module\": \"BestOf\",\n", - " \"args\": {\n", - " \"args\": \"$pred_args\",\n", - " \"accuracy_functions\": \"$accuracy_functions\",\n", - " \"ts_analysis\": null\n", - " }\n", - " }\n", - " }\n", - " },\n", - " \"problem_definition\": {\n", - " \"target\": \"target\",\n", - " \"pct_invalid\": 2,\n", - " \"unbias_target\": true,\n", - " \"seconds_per_mixer\": 1582,\n", - " \"seconds_per_encoder\": 12749,\n", - " \"time_aim\": 7780.458037514903,\n", - " \"target_weights\": null,\n", - " \"positive_domain\": false,\n", - " \"timeseries_settings\": {\n", - " \"is_timeseries\": false,\n", - " \"order_by\": null,\n", - " \"window\": null,\n", - " \"group_by\": null,\n", - " \"use_previous_target\": true,\n", - " \"nr_predictions\": null,\n", - " \"historical_columns\": null,\n", - " \"target_type\": \"\",\n", - " \"allow_incomplete_history\": false\n", - " },\n", - " \"anomaly_detection\": true,\n", - " \"ignore_features\": [\n", - " \"url_legal\",\n", - " \"license\",\n", - " \"standard_error\"\n", - " ],\n", - " \"fit_on_all\": true,\n", - " \"strict_mode\": true,\n", - " \"seed_nr\": 420\n", - " },\n", - " \"identifiers\": {\n", - " \"id\": \"Hash-like identifier\"\n", - " },\n", - " \"accuracy_functions\": [\n", - " \"r2_score\"\n", - " ]\n", - "}\n" - ] - } - ], - "source": [ - "print(default_json.to_json())" - ] - }, - { - "cell_type": "markdown", - "id": "level-vacation", - "metadata": {}, - "source": [ - "The above shows the minimal syntax required to create a functional JSON-AI object. For each feature you consider in the dataset, we specify the name of the feature, the type of encoder (feature-engineering method) to process the feature, and key word arguments to process the encoder. For the output, we perform a similar operation, but specify the types of mixers, or algorithms used in making a predictor that can estimate the target. 
Lastly, we populate the \"problem_definition\" key with the ingredients for our ML pipeline.\n", - "\n", - "These are the only elements required to get off the ground with JSON-AI. However, we're interested in making a *custom* approach. So, let's make this syntax a file, and introduce our own changes." - ] - }, - { - "cell_type": "code", - "execution_count": 5, - "id": "damaged-fluid", - "metadata": {}, - "outputs": [], - "source": [ - "with open(\"default.json\", \"w\") as fp:\n", - " fp.write(default_json.to_json())" - ] - }, - { - "cell_type": "markdown", - "id": "integrated-entrepreneur", - "metadata": {}, - "source": [ - "### 3) Build your own cleaner module\n", - "\n", - "Let's make a file called `MyCustomCleaner.py`. To write this file, we will use `lightwood.data.cleaner.cleaner` as inspiration.\n", - "\n", - "The goal output of the cleaner is to provide pre-processing to your dataset - the output is only a pandas DataFrame. In theory, any pre-processing can be done here. However, data can be highly irregular - our default `Cleaner` function has several main goals:\n", - "\n", - "(1) Strip away any identifier, etc. unwanted columns
\n", - "(2) Apply a cleaning function to each column in the dataset, according to that column's data type
\n", - "(3) Standardize NaN values within each column for appropriate downstream treatment
\n", - "\n", - "You can choose to omit many of these details and completely write this module from scratch, but the easiest way to introduce your custom changes is to borrow the `Cleaner` function, and add core changes in a custom block.\n", - "\n", - "This can be done as follows\n", - "\n", - "\n", - "You can see individual cleaning functions in `lightwood.data.cleaner`. If you want to entirely replace a cleaning technique given a particular data-type, we invite you to change `lightwood.data.cleaner.get_cleaning_func` using the argument `custom_cleaning_functions`; in this dictionary, for a datatype (specified in `api.dtype`), you can assign your own function to override our defaults. " - ] - }, - { - "cell_type": "markdown", - "id": "front-preview", - "metadata": {}, - "source": [ - "```\n", - "import re\n", - "from copy import deepcopy\n", - "\n", - "import numpy as np\n", - "import pandas as pd\n", - "\n", - "# For time-series\n", - "import datetime\n", - "from dateutil.parser import parse as parse_dt\n", - "\n", - "from lightwood.api.dtype import dtype\n", - "from lightwood.helpers import text\n", - "from lightwood.helpers.log import log\n", - "from lightwood.api.types import TimeseriesSettings\n", - "from lightwood.helpers.numeric import can_be_nan_numeric\n", - "\n", - "# Import NLTK for stopwords\n", - "import nltk\n", - "from nltk.corpus import stopwords\n", - "\n", - "stop_words = set(stopwords.words(\"english\"))\n", - "\n", - "from typing import Dict, List, Optional, Tuple, Callable, Union\n", - "\n", - "# Borrow functions from Lightwood's cleaner\n", - "from lightwood.data.cleaner import (\n", - " _remove_columns,\n", - " _get_columns_to_clean,\n", - " get_cleaning_func,\n", - ")\n", - "\n", - "# Use for standardizing NaNs\n", - "VALUES_FOR_NAN_AND_NONE_IN_PANDAS = [np.nan, \"nan\", \"NaN\", \"Nan\", \"None\"]\n", - "\n", - "\n", - "def cleaner(\n", - " data: pd.DataFrame,\n", - " dtype_dict: Dict[str, str],\n", - " identifiers: Dict[str, str],\n", - " target: str,\n", - " mode: str,\n", - " timeseries_settings: TimeseriesSettings,\n", - " anomaly_detection: bool,\n", - " custom_cleaning_functions: Dict[str, str] = {},\n", - ") -> pd.DataFrame:\n", - " \"\"\"\n", - " The cleaner is a function which takes in the raw data, plus additional information about it's types and about the problem. 
Based on this it generates a \"clean\" representation of the data, where each column has an ideal standardized type and all malformed or otherwise missing or invalid elements are turned into ``None``\n", - "\n", - " :param data: The raw data\n", - " :param dtype_dict: Type information for each column\n", - " :param identifiers: A dict containing all identifier typed columns\n", - " :param target: The target columns\n", - " :param mode: Can be \"predict\" or \"train\"\n", - " :param timeseries_settings: Timeseries related settings, only relevant for timeseries predictors, otherwise can be the default object\n", - " :param anomaly_detection: Are we detecting anomalies with this predictor?\n", - "\n", - " :returns: The cleaned data\n", - " \"\"\" # noqa\n", - "\n", - " data = _remove_columns(\n", - " data,\n", - " identifiers,\n", - " target,\n", - " mode,\n", - " timeseries_settings,\n", - " anomaly_detection,\n", - " dtype_dict,\n", - " )\n", - "\n", - " for col in _get_columns_to_clean(data, dtype_dict, mode, target):\n", - "\n", - " log.info(\"Cleaning column =\" + str(col))\n", - " # Get and apply a cleaning function for each data type\n", - " # If you want to customize the cleaner, it's likely you can to modify ``get_cleaning_func``\n", - " data[col] = data[col].apply(\n", - " get_cleaning_func(dtype_dict[col], custom_cleaning_functions)\n", - " )\n", - "\n", - " # ------------------------ #\n", - " # INTRODUCE YOUR CUSTOM BLOCK\n", - "\n", - " # If column data type is a text type, remove stop-words\n", - " if dtype_dict[col] in (dtype.rich_text, dtype.short_text):\n", - " data[col] = data[col].apply(\n", - " lambda x: \" \".join(\n", - " [word for word in x.split() if word not in stop_words]\n", - " )\n", - " )\n", - "\n", - " # Enforce numerical columns as non-negative\n", - " if dtype_dict[col] in (dtype.integer, dtype.float):\n", - " log.info(\"Converted \" + str(col) + \" into strictly non-negative\")\n", - " data[col] = data[col].apply(lambda x: x if x > 0 else 0.0)\n", - "\n", - " # ------------------------ #\n", - " data[col] = data[col].replace(\n", - " to_replace=VALUES_FOR_NAN_AND_NONE_IN_PANDAS, value=None\n", - " )\n", - "\n", - " return data\n", - "```" - ] - }, - { - "cell_type": "markdown", - "id": "radical-armenia", - "metadata": {}, - "source": [ - "#### Place your custom module in `~/lightwood_modules`\n", - "\n", - "We automatically search for custom scripts in your `~/lightwood_modules` path. Place your file there. Later, you'll see when we autogenerate code, that you can change your import location if you choose." - ] - }, - { - "cell_type": "markdown", - "id": "characteristic-promotion", - "metadata": {}, - "source": [ - "### 4) Introduce your custom cleaner in JSON-AI\n", - "\n", - "Now let's introduce our custom cleaner. JSON-AI keeps a lightweight syntax but fills in many default modules (like splitting, cleaning).\n", - "\n", - "For the custom cleaner, we'll work by editing the \"cleaner\" key. We will change properties within it as follows:\n", - "(1) \"module\" - place the name of the function. In our case it will be \"MyCustomCleaner.cleaner\"\n", - "(2) \"args\" - any keyword argument specific to your cleaner's internals. 
\n", - "\n", - "This will look as follows:\n", - "```\n", - " \"cleaner\": {\n", - " \"module\": \"MyCustomCleaner.cleaner\",\n", - " \"args\": {\n", - " \"identifiers\": \"$identifiers\",\n", - " \"data\": \"data\",\n", - " \"dtype_dict\": \"$dtype_dict\",\n", - " \"target\": \"$target\",\n", - " \"mode\": \"$mode\",\n", - " \"timeseries_settings\": \"$problem_definition.timeseries_settings\",\n", - " \"anomaly_detection\": \"$problem_definition.anomaly_detection\"\n", - " }\n", - "```\n", - "\n", - "You may be wondering what the \"$\" variables reference. In certain cases, we'd like JSON-AI to auto-fill internal variables when automatically generating code, for example, we've already specified the \"target\" - it would be easier to simply refer in a modular sense what that term is. That is what these variables represent.\n", - "\n", - "As we borrowed most of the default `Cleaner`; we keep these arguments. In theory, if we were writing much of these details from scratch, we can customize these values as necessary." - ] - }, - { - "cell_type": "markdown", - "id": "respiratory-radiation", - "metadata": {}, - "source": [ - "### 5) Generate Python code representing your ML pipeline\n", - "\n", - "Now we're ready to load up our custom JSON-AI and generate the predictor code!\n", - "\n", - "We can do this by first reading in our custom json-syntax, and then calling the function `code_from_json_ai`. " - ] - }, - { - "cell_type": "code", - "execution_count": 6, - "id": "floating-patent", - "metadata": {}, - "outputs": [ - { - "name": "stdout", - "output_type": "stream", - "text": [ - "MyCustomCleaner.py\n", - "MyCustomCleaner\n", - "MyCustomSplitter.py\n", - "MyCustomSplitter\n", - "import lightwood\n", - "from lightwood.analysis import *\n", - "from lightwood.api import *\n", - "from lightwood.data import *\n", - "from lightwood.encoder import *\n", - "from lightwood.ensemble import *\n", - "from lightwood.helpers.device import *\n", - "from lightwood.helpers.general import *\n", - "from lightwood.helpers.log import *\n", - "from lightwood.helpers.numeric import *\n", - "from lightwood.helpers.parallelism import *\n", - "from lightwood.helpers.seed import *\n", - "from lightwood.helpers.text import *\n", - "from lightwood.helpers.torch import *\n", - "from lightwood.mixer import *\n", - "import pandas as pd\n", - "from typing import Dict, List\n", - "import os\n", - "from types import ModuleType\n", - "import importlib.machinery\n", - "import sys\n", - "\n", - "\n", - "for import_dir in [os.path.expanduser(\"~/lightwood_modules\"), \"/etc/lightwood_modules\"]:\n", - " if os.path.exists(import_dir) and os.access(import_dir, os.R_OK):\n", - " for file_name in list(os.walk(import_dir))[0][2]:\n", - " print(file_name)\n", - " if file_name[-3:] != \".py\":\n", - " continue\n", - " mod_name = file_name[:-3]\n", - " print(mod_name)\n", - " loader = importlib.machinery.SourceFileLoader(\n", - " mod_name, os.path.join(import_dir, file_name)\n", - " )\n", - " module = ModuleType(loader.name)\n", - " loader.exec_module(module)\n", - " sys.modules[mod_name] = module\n", - " exec(f\"import {mod_name}\")\n", - "\n", - "\n", - "class Predictor(PredictorInterface):\n", - " target: str\n", - " mixers: List[BaseMixer]\n", - " encoders: Dict[str, BaseEncoder]\n", - " ensemble: BaseEnsemble\n", - " mode: str\n", - "\n", - " def __init__(self):\n", - " seed(420)\n", - " self.target = \"target\"\n", - " self.mode = \"inactive\"\n", - " self.problem_definition = ProblemDefinition.from_dict(\n", - " {\n", - " 
\"target\": \"target\",\n", - " \"pct_invalid\": 2,\n", - " \"unbias_target\": True,\n", - " \"seconds_per_mixer\": 1582,\n", - " \"seconds_per_encoder\": 12749,\n", - " \"time_aim\": 7780.458037514903,\n", - " \"target_weights\": None,\n", - " \"positive_domain\": False,\n", - " \"timeseries_settings\": {\n", - " \"is_timeseries\": False,\n", - " \"order_by\": None,\n", - " \"window\": None,\n", - " \"group_by\": None,\n", - " \"use_previous_target\": True,\n", - " \"nr_predictions\": None,\n", - " \"historical_columns\": None,\n", - " \"target_type\": \"\",\n", - " \"allow_incomplete_history\": False,\n", - " },\n", - " \"anomaly_detection\": True,\n", - " \"ignore_features\": [\"url_legal\", \"license\", \"standard_error\"],\n", - " \"fit_on_all\": True,\n", - " \"strict_mode\": True,\n", - " \"seed_nr\": 420,\n", - " }\n", - " )\n", - " self.accuracy_functions = [\"r2_score\"]\n", - " self.identifiers = {\"id\": \"Hash-like identifier\"}\n", - " self.dtype_dict = {\"target\": \"float\", \"excerpt\": \"rich_text\"}\n", - "\n", - " # Any feature-column dependencies\n", - " self.dependencies = {\"excerpt\": []}\n", - "\n", - " self.input_cols = [\"excerpt\"]\n", - "\n", - " # Initial stats analysis\n", - " self.statistical_analysis = None\n", - "\n", - " def analyze_data(self, data: pd.DataFrame) -> None:\n", - " # Perform a statistical analysis on the unprocessed data\n", - "\n", - " log.info(\"Performing statistical analysis on data\")\n", - " self.statistical_analysis = lightwood.data.statistical_analysis(\n", - " data,\n", - " self.dtype_dict,\n", - " {\"id\": \"Hash-like identifier\"},\n", - " self.problem_definition,\n", - " )\n", - "\n", - " # Instantiate post-training evaluation\n", - " self.analysis_blocks = [\n", - " ICP(\n", - " fixed_significance=None,\n", - " confidence_normalizer=False,\n", - " positive_domain=self.statistical_analysis.positive_domain,\n", - " ),\n", - " AccStats(deps=[\"ICP\"]),\n", - " GlobalFeatureImportance(disable_column_importance=False),\n", - " ]\n", - "\n", - " def preprocess(self, data: pd.DataFrame) -> pd.DataFrame:\n", - " # Preprocess and clean data\n", - "\n", - " log.info(\"Cleaning the data\")\n", - " data = MyCustomCleaner.cleaner(\n", - " data=data,\n", - " identifiers=self.identifiers,\n", - " dtype_dict=self.dtype_dict,\n", - " target=self.target,\n", - " mode=self.mode,\n", - " timeseries_settings=self.problem_definition.timeseries_settings,\n", - " anomaly_detection=self.problem_definition.anomaly_detection,\n", - " )\n", - "\n", - " # Time-series blocks\n", - "\n", - " return data\n", - "\n", - " def split(self, data: pd.DataFrame) -> Dict[str, pd.DataFrame]:\n", - " # Split the data into training/testing splits\n", - "\n", - " log.info(\"Splitting the data into train/test\")\n", - " train_test_data = splitter(\n", - " data=data,\n", - " seed=1,\n", - " pct_train=80,\n", - " pct_dev=10,\n", - " pct_test=10,\n", - " tss=self.problem_definition.timeseries_settings,\n", - " target=self.target,\n", - " dtype_dict=self.dtype_dict,\n", - " )\n", - "\n", - " return train_test_data\n", - "\n", - " def prepare(self, data: Dict[str, pd.DataFrame]) -> None:\n", - " # Prepare encoders to featurize data\n", - "\n", - " self.mode = \"train\"\n", - "\n", - " if self.statistical_analysis is None:\n", - " raise Exception(\"Please run analyze_data first\")\n", - "\n", - " # Column to encoder mapping\n", - " self.encoders = {\n", - " \"target\": Float.NumericEncoder(\n", - " is_target=True,\n", - " 
positive_domain=self.statistical_analysis.positive_domain,\n", - " ),\n", - " \"excerpt\": Rich_Text.PretrainedLangEncoder(\n", - " output_type=False,\n", - " stop_after=self.problem_definition.seconds_per_encoder,\n", - " ),\n", - " }\n", - "\n", - " # Prepare the training + dev data\n", - " concatenated_train_dev = pd.concat([data[\"train\"], data[\"dev\"]])\n", - "\n", - " log.info(\"Preparing the encoders\")\n", - "\n", - " encoder_prepping_dict = {}\n", - "\n", - " # Prepare encoders that do not require learned strategies\n", - " for col_name, encoder in self.encoders.items():\n", - " if not encoder.is_trainable_encoder:\n", - " encoder_prepping_dict[col_name] = [\n", - " encoder,\n", - " concatenated_train_dev[col_name],\n", - " \"prepare\",\n", - " ]\n", - " log.info(\n", - " f\"Encoder prepping dict length of: {len(encoder_prepping_dict)}\"\n", - " )\n", - "\n", - " # Setup parallelization\n", - " parallel_prepped_encoders = mut_method_call(encoder_prepping_dict)\n", - " for col_name, encoder in parallel_prepped_encoders.items():\n", - " self.encoders[col_name] = encoder\n", - "\n", - " # Prepare the target\n", - " if self.target not in parallel_prepped_encoders:\n", - " if self.encoders[self.target].is_trainable_encoder:\n", - " self.encoders[self.target].prepare(\n", - " data[\"train\"][self.target], data[\"dev\"][self.target]\n", - " )\n", - " else:\n", - " self.encoders[self.target].prepare(\n", - " pd.concat([data[\"train\"], data[\"dev\"]])[self.target]\n", - " )\n", - "\n", - " # Prepare any non-target encoders that are learned\n", - " for col_name, encoder in self.encoders.items():\n", - " if encoder.is_trainable_encoder:\n", - " priming_data = pd.concat([data[\"train\"], data[\"dev\"]])\n", - " kwargs = {}\n", - " if self.dependencies[col_name]:\n", - " kwargs[\"dependency_data\"] = {}\n", - " for col in self.dependencies[col_name]:\n", - " kwargs[\"dependency_data\"][col] = {\n", - " \"original_type\": self.dtype_dict[col],\n", - " \"data\": priming_data[col],\n", - " }\n", - "\n", - " # If an encoder representation requires the target, provide priming data\n", - " if hasattr(encoder, \"uses_target\"):\n", - " kwargs[\"encoded_target_values\"] = parallel_prepped_encoders[\n", - " self.target\n", - " ].encode(priming_data[self.target])\n", - "\n", - " encoder.prepare(\n", - " data[\"train\"][col_name], data[\"dev\"][col_name], **kwargs\n", - " )\n", - "\n", - " def featurize(self, split_data: Dict[str, pd.DataFrame]):\n", - " # Featurize data into numerical representations for models\n", - "\n", - " log.info(\"Featurizing the data\")\n", - " feature_data = {key: None for key in split_data.keys()}\n", - "\n", - " for key, data in split_data.items():\n", - " feature_data[key] = EncodedDs(self.encoders, data, self.target)\n", - "\n", - " return feature_data\n", - "\n", - " def fit(self, enc_data: Dict[str, pd.DataFrame]) -> None:\n", - " # Fit predictors to estimate target\n", - "\n", - " self.mode = \"train\"\n", - "\n", - " # --------------- #\n", - " # Extract data\n", - " # --------------- #\n", - " # Extract the featurized data into train/dev/test\n", - " encoded_train_data = enc_data[\"train\"]\n", - " encoded_dev_data = enc_data[\"dev\"]\n", - " encoded_test_data = enc_data[\"test\"]\n", - "\n", - " log.info(\"Training the mixers\")\n", - "\n", - " # --------------- #\n", - " # Fit Models\n", - " # --------------- #\n", - " # Assign list of mixers\n", - " self.mixers = [\n", - " Neural(\n", - " fit_on_dev=True,\n", - " search_hyperparameters=True,\n", - " 
net=\"DefaultNet\",\n", - " stop_after=self.problem_definition.seconds_per_mixer,\n", - " target_encoder=self.encoders[self.target],\n", - " target=self.target,\n", - " dtype_dict=self.dtype_dict,\n", - " input_cols=self.input_cols,\n", - " timeseries_settings=self.problem_definition.timeseries_settings,\n", - " ),\n", - " LightGBM(\n", - " fit_on_dev=True,\n", - " stop_after=self.problem_definition.seconds_per_mixer,\n", - " target=self.target,\n", - " dtype_dict=self.dtype_dict,\n", - " input_cols=self.input_cols,\n", - " ),\n", - " Regression(\n", - " stop_after=self.problem_definition.seconds_per_mixer,\n", - " target=self.target,\n", - " dtype_dict=self.dtype_dict,\n", - " target_encoder=self.encoders[self.target],\n", - " ),\n", - " ]\n", - "\n", - " # Train mixers\n", - " trained_mixers = []\n", - " for mixer in self.mixers:\n", - " try:\n", - " mixer.fit(encoded_train_data, encoded_dev_data)\n", - " trained_mixers.append(mixer)\n", - " except Exception as e:\n", - " log.warning(f\"Exception: {e} when training mixer: {mixer}\")\n", - " if True and mixer.stable:\n", - " raise e\n", - "\n", - " # Update mixers to trained versions\n", - " self.mixers = trained_mixers\n", - "\n", - " # --------------- #\n", - " # Create Ensembles\n", - " # --------------- #\n", - " log.info(\"Ensembling the mixer\")\n", - " # Create an ensemble of mixers to identify best performing model\n", - " self.pred_args = PredictionArguments()\n", - " self.ensemble = BestOf(\n", - " ts_analysis=None,\n", - " data=encoded_test_data,\n", - " accuracy_functions=self.accuracy_functions,\n", - " target=self.target,\n", - " mixers=self.mixers,\n", - " )\n", - " self.supports_proba = self.ensemble.supports_proba\n", - "\n", - " def analyze_ensemble(self, enc_data: Dict[str, pd.DataFrame]) -> None:\n", - " # Evaluate quality of fit for the ensemble of mixers\n", - "\n", - " # --------------- #\n", - " # Extract data\n", - " # --------------- #\n", - " # Extract the featurized data into train/dev/test\n", - " encoded_train_data = enc_data[\"train\"]\n", - " encoded_dev_data = enc_data[\"dev\"]\n", - " encoded_test_data = enc_data[\"test\"]\n", - "\n", - " # --------------- #\n", - " # Analyze Ensembles\n", - " # --------------- #\n", - " log.info(\"Analyzing the ensemble of mixers\")\n", - " self.model_analysis, self.runtime_analyzer = model_analyzer(\n", - " data=encoded_test_data,\n", - " train_data=encoded_train_data,\n", - " stats_info=self.statistical_analysis,\n", - " ts_cfg=self.problem_definition.timeseries_settings,\n", - " accuracy_functions=self.accuracy_functions,\n", - " predictor=self.ensemble,\n", - " target=self.target,\n", - " dtype_dict=self.dtype_dict,\n", - " analysis_blocks=self.analysis_blocks,\n", - " )\n", - "\n", - " def learn(self, data: pd.DataFrame) -> None:\n", - " log.info(f\"Dropping features: {self.problem_definition.ignore_features}\")\n", - " data = data.drop(\n", - " columns=self.problem_definition.ignore_features, errors=\"ignore\"\n", - " )\n", - "\n", - " self.mode = \"train\"\n", - "\n", - " # Perform stats analysis\n", - " self.analyze_data(data)\n", - "\n", - " # Pre-process the data\n", - " clean_data = self.preprocess(data)\n", - "\n", - " # Create train/test (dev) split\n", - " train_dev_test = self.split(clean_data)\n", - "\n", - " # Prepare encoders\n", - " self.prepare(train_dev_test)\n", - "\n", - " # Create feature vectors from data\n", - " enc_train_test = self.featurize(train_dev_test)\n", - "\n", - " # Prepare mixers\n", - " self.fit(enc_train_test)\n", - "\n", - " # 
Analyze the ensemble\n", - " self.analyze_ensemble(enc_train_test)\n", - "\n", - " # ------------------------ #\n", - " # Enable model partial fit AFTER it is trained and evaluated for performance with the appropriate train/dev/test splits.\n", - " # This assumes the predictor could continuously evolve, hence including reserved testing data may improve predictions.\n", - " # SET `json_ai.problem_definition.fit_on_all=False` TO TURN THIS BLOCK OFF.\n", - "\n", - " # Update the mixers with partial fit\n", - " if self.problem_definition.fit_on_all:\n", - "\n", - " log.info(\"Adjustment on validation requested.\")\n", - " update_data = {\n", - " \"new\": enc_train_test[\"test\"],\n", - " \"old\": ConcatedEncodedDs(\n", - " [enc_train_test[\"train\"], enc_train_test[\"dev\"]]\n", - " ),\n", - " } # noqa\n", - "\n", - " self.adjust(update_data)\n", - "\n", - " def adjust(self, new_data: Dict[str, pd.DataFrame]) -> None:\n", - " # Update mixers with new information\n", - "\n", - " self.mode = \"train\"\n", - "\n", - " # --------------- #\n", - " # Extract data\n", - " # --------------- #\n", - " # Extract the featurized data\n", - " encoded_old_data = new_data[\"old\"]\n", - " encoded_new_data = new_data[\"new\"]\n", - "\n", - " # --------------- #\n", - " # Adjust (Update) Mixers\n", - " # --------------- #\n", - " log.info(\"Updating the mixers\")\n", - "\n", - " for mixer in self.mixers:\n", - " mixer.partial_fit(encoded_new_data, encoded_old_data)\n", - "\n", - " def predict(self, data: pd.DataFrame, args: Dict = {}) -> pd.DataFrame:\n", - "\n", - " # Remove columns that user specifies to ignore\n", - " log.info(f\"Dropping features: {self.problem_definition.ignore_features}\")\n", - " data = data.drop(\n", - " columns=self.problem_definition.ignore_features, errors=\"ignore\"\n", - " )\n", - " for col in self.input_cols:\n", - " if col not in data.columns:\n", - " data[col] = [None] * len(data)\n", - "\n", - " # Clean the data\n", - " self.mode = \"predict\"\n", - " log.info(\"Cleaning the data\")\n", - " data = MyCustomCleaner.cleaner(\n", - " data=data,\n", - " identifiers=self.identifiers,\n", - " dtype_dict=self.dtype_dict,\n", - " target=self.target,\n", - " mode=self.mode,\n", - " timeseries_settings=self.problem_definition.timeseries_settings,\n", - " anomaly_detection=self.problem_definition.anomaly_detection,\n", - " )\n", - "\n", - " # Featurize the data\n", - " encoded_ds = EncodedDs(self.encoders, data, self.target)\n", - " encoded_data = encoded_ds.get_encoded_data(include_target=False)\n", - "\n", - " self.pred_args = PredictionArguments.from_dict(args)\n", - " df = self.ensemble(encoded_ds, args=self.pred_args)\n", - "\n", - " if self.pred_args.all_mixers:\n", - " return df\n", - " else:\n", - " insights, global_insights = explain(\n", - " data=data,\n", - " encoded_data=encoded_data,\n", - " predictions=df,\n", - " ts_analysis=None,\n", - " timeseries_settings=self.problem_definition.timeseries_settings,\n", - " positive_domain=self.statistical_analysis.positive_domain,\n", - " anomaly_detection=self.problem_definition.anomaly_detection,\n", - " analysis=self.runtime_analyzer,\n", - " target_name=self.target,\n", - " target_dtype=self.dtype_dict[self.target],\n", - " explainer_blocks=self.analysis_blocks,\n", - " fixed_confidence=self.pred_args.fixed_confidence,\n", - " anomaly_error_rate=self.pred_args.anomaly_error_rate,\n", - " anomaly_cooldown=self.pred_args.anomaly_cooldown,\n", - " )\n", - " return insights\n", - "\n" - ] - } - ], - "source": [ - "# Make changes to your 
JSON-file and load the custom version\n", - "with open('custom.json', 'r') as fp:\n", - " modified_json = JsonAI.from_json(fp.read())\n", - "\n", - "#Generate python code that fills in your pipeline\n", - "code = code_from_json_ai(modified_json)\n", - "\n", - "print(code)\n", - "\n", - "# Save code to a file (Optional)\n", - "with open('custom_cleaner_pipeline.py', 'w') as fp:\n", - " fp.write(code)" - ] - }, - { - "cell_type": "markdown", - "id": "handled-oasis", - "metadata": {}, - "source": [ - "As you can see, an end-to-end pipeline of our entire ML procedure has been generated. There are several abstracted functions to enable transparency as to what processes your data goes through in order to build these models.\n", - "\n", - "The key steps of the pipeline are as follows:\n", - "\n", - "(1) Run a **statistical analysis** with `analyze_data`
\n", - "(2) Clean your data with `preprocess`
\n", - "(3) Make a training/dev/testing split with `split`
\n", - "(4) Prepare your feature-engineering pipelines with `prepare`
\n", - "(5) Create your features with `featurize`
\n", - "(6) Fit your predictor models with `fit`
\n", - "\n", - "You can customize this further if necessary, but you have all the steps necessary to train a model!\n", - "\n", - "We recommend familiarizing with these steps by calling the above commands, ideally in order. Some commands (namely `prepare`, `featurize`, and `fit`) do depend on other steps.\n", - "\n", - "If you want to omit the individual steps, we recommend your simply call the `learn` method, which compiles all the necessary steps implemented to give your fully trained predictive models starting with unprocessed data! " - ] - }, - { - "cell_type": "markdown", - "id": "meaning-saskatchewan", - "metadata": {}, - "source": [ - "### 6) Call python to run your code and see your preprocessed outputs\n", - "\n", - "Once we have code, we can turn this into a python object by calling `predictor_from_code`. This instantiates the `PredictorInterface` object. \n", - "\n", - "This predictor object can be then used to run your pipeline." - ] - }, - { - "cell_type": "code", - "execution_count": 7, - "id": "violent-guard", - "metadata": {}, - "outputs": [ - { - "name": "stdout", - "output_type": "stream", - "text": [ - "MyCustomCleaner.py\n", - "MyCustomCleaner\n", - "MyCustomSplitter.py\n", - "MyCustomSplitter\n" - ] - } - ], - "source": [ - "# Turn the code above into a predictor object\n", - "predictor = predictor_from_code(code)" - ] - }, - { - "cell_type": "code", - "execution_count": 8, - "id": "closing-episode", - "metadata": {}, - "outputs": [ - { - "name": "stderr", - "output_type": "stream", - "text": [ - "INFO:lightwood-50752:Cleaning the data\n", - "INFO:lightwood-50752:Cleaning column =target\n", - "INFO:lightwood-50752:Converted target into strictly non-negative\n", - "INFO:lightwood-50752:Cleaning column =excerpt\n" - ] - }, - { - "data": { - "text/html": [ - "
\n", - "\n", - "\n", - " \n", - " \n", - " \n", - " \n", - " \n", - " \n", - " \n", - " \n", - " \n", - " \n", - " \n", - " \n", - " \n", - " \n", - " \n", - " \n", - " \n", - " \n", - " \n", - " \n", - " \n", - " \n", - " \n", - " \n", - " \n", - " \n", - " \n", - " \n", - " \n", - " \n", - " \n", - " \n", - " \n", - " \n", - "
excerpttarget
0When young people returned ballroom, presented...0.000000
1All dinner time, Mrs. Fayre somewhat silent, e...0.000000
2As Roger predicted, snow departed quickly came...0.000000
3And outside palace great garden walled round, ...0.000000
4Once upon time Three Bears lived together hous...0.247197
\n", - "
" - ], - "text/plain": [ - " excerpt target\n", - "0 When young people returned ballroom, presented... 0.000000\n", - "1 All dinner time, Mrs. Fayre somewhat silent, e... 0.000000\n", - "2 As Roger predicted, snow departed quickly came... 0.000000\n", - "3 And outside palace great garden walled round, ... 0.000000\n", - "4 Once upon time Three Bears lived together hous... 0.247197" - ] - }, - "execution_count": 8, - "metadata": {}, - "output_type": "execute_result" - } - ], - "source": [ - "# Pre-process the data\n", - "cleaned_data = predictor.preprocess(data)\n", - "\n", - "cleaned_data.head()" - ] - }, - { - "cell_type": "code", - "execution_count": 9, - "id": "major-stake", - "metadata": {}, - "outputs": [ - { - "name": "stdout", - "output_type": "stream", - "text": [ - "\u001b[1mOriginal Data\n", - "\u001b[0m\n", - "Excerpt:\n", - " When the young people returned to the ballroom, it presented a decidedly changed appearance. Instead of an interior scene, it was a winter landscape.\n", - "The floor was covered with snow-white canvas, not laid on smoothly, but rumpled over bumps and hillocks, like a real snow field. The numerous palms and evergreens that had decorated the room, were powdered with flour and strewn with tufts of cotton, like snow. Also diamond dust had been lightly sprinkled on them, and glittering crystal icicles hung from the branches.\n", - "At each end of the room, on the wall, hung a beautiful bear-skin rug.\n", - "These rugs were for prizes, one for the girls and one for the boys. And this was the game.\n", - "The girls were gathered at one end of the room and the boys at the other, and one end was called the North Pole, and the other the South Pole. Each player was given a small flag which they were to plant on reaching the Pole.\n", - "This would have been an easy matter, but each traveller was obliged to wear snowshoes.\n", - "\n", - "Target:\n", - " -0.340259125\n", - "\u001b[1m\n", - "\n", - "Cleaned Data\n", - "\u001b[0m\n", - "Excerpt:\n", - " When young people returned ballroom, presented decidedly changed appearance. Instead interior scene, winter landscape. The floor covered snow-white canvas, laid smoothly, rumpled bumps hillocks, like real snow field. The numerous palms evergreens decorated room, powdered flour strewn tufts cotton, like snow. Also diamond dust lightly sprinkled them, glittering crystal icicles hung branches. At end room, wall, hung beautiful bear-skin rug. These rugs prizes, one girls one boys. And game. The girls gathered one end room boys other, one end called North Pole, South Pole. Each player given small flag plant reaching Pole. This would easy matter, traveller obliged wear snowshoes.\n", - "\n", - "Target:\n", - " 0.0\n" - ] - } - ], - "source": [ - "print(\"\\033[1m\" + \"Original Data\\n\" + \"\\033[0m\")\n", - "print(\"Excerpt:\\n\", data.iloc[0][\"excerpt\"])\n", - "print(\"\\nTarget:\\n\", data.iloc[0][\"target\"])\n", - "\n", - "print(\"\\033[1m\" + \"\\n\\nCleaned Data\\n\" + \"\\033[0m\")\n", - "print(\"Excerpt:\\n\", cleaned_data.iloc[0][\"excerpt\"])\n", - "print(\"\\nTarget:\\n\", cleaned_data.iloc[0][\"target\"])" - ] - }, - { - "cell_type": "markdown", - "id": "celtic-scientist", - "metadata": {}, - "source": [ - "As you can see, the cleaning-process we introduced cut out the stop-words from the Excerpt, and enforced the target data to stay positive.\n", - "\n", - "We hope this tutorial was informative on how to introduce a **custom preprocessing method** to your datasets! 
For more customization tutorials, please check our [documentation](https://lightwood.io/tutorials.html).\n", - "\n", - "If you want to download the Jupyter-notebook version of this tutorial, check out the source github location found here: `lightwood/docssrc/source/tutorials/custom_cleaner`. " - ] - } - ], - "metadata": { - "kernelspec": { - "display_name": "mdb", - "language": "python", - "name": "mdb" - }, - "language_info": { - "codemirror_mode": { - "name": "ipython", - "version": 3 - }, - "file_extension": ".py", - "mimetype": "text/x-python", - "name": "python", - "nbconvert_exporter": "python", - "pygments_lexer": "ipython3", - "version": "3.8.10" - } - }, - "nbformat": 4, - "nbformat_minor": 5 -} diff --git a/docs/tutorials/custom_encoder_rulebased/custom_encoder_rulebased.html b/docs/tutorials/custom_encoder_rulebased/custom_encoder_rulebased.html deleted file mode 100644 index 3cecdbc39..000000000 --- a/docs/tutorials/custom_encoder_rulebased/custom_encoder_rulebased.html +++ /dev/null @@ -1,1202 +0,0 @@ - - - - - - - - - - Custom Encoder: Rule-Based — lightwood 1.6.1 documentation - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - -
- - - -
- - - - - -
- -
- - - - - - - - - - - - - - - - - - - -
- - - - -
-
-
-
- - - -
-

Custom Encoder: Rule-Based

-

Lightwood uses “Encoders” to convert preprocessed (cleaned) data into features. Encoders represent the feature engineering step of the data science pipeline; they can either have a set of instructions (“rule-based”) or a learned representation (trained on data).

-

In the following notebook, we will experiment with creating a custom encoder that creates Label Encoding.

-

For example, imagine we have the following set of categories:

-
MyColumnData = ["apple", "orange", "orange", "banana", "apple", "dragonfruit"]
-
-
-

There are 4 categories to consider: “apple”, “banana”, “orange”, and “dragonfruit”.

-

Label encoding allows you to refer to these categories as if they were numbers. For example, consider the mapping (arranged alphabetically):

-

1 - apple 2 - banana 3 - dragonfruit 4 - orange

-

Using this mapping, we can convert the above data as follows:

-
MyFeatureData = [1, 4, 4, 2, 1, 3]
-
-
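As a rough illustration, the same mapping can be written in a few lines of plain Python (the variable names here are just for this example):

```python
my_column_data = ["apple", "orange", "orange", "banana", "apple", "dragonfruit"]

# Arrange the unique categories alphabetically and assign labels starting at 1
label_dict = {cat: idx + 1 for idx, cat in enumerate(sorted(set(my_column_data)))}
# {'apple': 1, 'banana': 2, 'dragonfruit': 3, 'orange': 4}

my_feature_data = [label_dict[cat] for cat in my_column_data]
# [1, 4, 4, 2, 1, 3]
```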
-

In the following notebook, we will design a LabelEncoder for Lightwood for use on categorical data. We will be using the Kaggle “Used Car” dataset. We’ve provided a link for you to automatically access this CSV. This dataset describes various details of cars on sale - with the goal of predicting how much this car may sell for.

-

Let’s get started.

-
-
[1]:
-
-
-
-import pandas as pd
-
-# Lightwood modules
-import lightwood as lw
-from lightwood import ProblemDefinition, \
-                      JsonAI, \
-                      json_ai_from_problem, \
-                      code_from_json_ai, \
-                      predictor_from_code
-
-
-
-
-
-

1) Load your data

-

Lightwood works with pandas.DataFrames; load data via pandas as follows:

-
-
[2]:
-
-
-
-filename = 'https://raw.githubusercontent.com/mindsdb/benchmarks/main/benchmarks/datasets/used_car_price/data.csv'
-df = pd.read_csv(filename)
-df.head()
-
-
-
-
-
[2]:
-
-
-
-
- - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - -
modelyearpricetransmissionmileagefuelTypetaxmpgengineSize
0A1201712500Manual15735Petrol15055.41.4
1A6201616500Automatic36203Diesel2064.22.0
2A1201611000Manual29946Petrol3055.41.4
3A4201716800Automatic25952Diesel14567.32.0
4A3201917300Manual1998Petrol14549.61.0
-
-
-

We can see a handful of columns above, such as model, year, price, transmission, mileage, fuelType, tax, mpg, engineSize. Some columns are numerical whereas others are categorical. We are going to specifically only focus on categorical columns.

-
-
-

2) Generate JSON-AI Syntax

-

We will make a LabelEncoder as follows:

-
    -
  1. Find all unique examples within a column

  2. -
  3. Order the examples in a consistent way

  4. -
  5. Label (python-index of 0 as start) each category

  6. -
  7. Assign the label according to each datapoint.

  8. -
-

First, let’s generate a JSON-AI syntax so we can automatically identify each column.

-
-
[3]:
-
-
-
-# Create the Problem Definition
-pdef = ProblemDefinition.from_dict({
-    'target': 'price', # column you want to predict
-    #'ignore_features': ['year', 'mileage', 'tax', 'mpg', 'engineSize']
-})
-
-# Generate a JSON-AI object
-json_ai = json_ai_from_problem(df, problem_definition=pdef)
-
-
-
-
-
-
-
-
-INFO:lightwood-53258:Dropping features: []
-INFO:lightwood-53258:Analyzing a sample of 6920
-INFO:lightwood-53258:from a total population of 10668, this is equivalent to 64.9% of your data.
-INFO:lightwood-53258:Using 15 processes to deduct types.
-INFO:lightwood-53258:Infering type for: year
-INFO:lightwood-53258:Infering type for: model
-INFO:lightwood-53258:Infering type for: price
-INFO:lightwood-53258:Infering type for: mileage
-INFO:lightwood-53258:Infering type for: transmission
-INFO:lightwood-53258:Infering type for: fuelType
-INFO:lightwood-53258:Infering type for: tax
-INFO:lightwood-53258:Infering type for: mpg
-INFO:lightwood-53258:Infering type for: engineSize
-INFO:lightwood-53258:Column year has data type integer
-INFO:lightwood-53258:Column tax has data type integer
-INFO:lightwood-53258:Column price has data type integer
-INFO:lightwood-53258:Column mileage has data type integer
-INFO:lightwood-53258:Column engineSize has data type float
-INFO:lightwood-53258:Column mpg has data type float
-INFO:lightwood-53258:Column fuelType has data type categorical
-INFO:lightwood-53258:Column transmission has data type categorical
-INFO:lightwood-53258:Column model has data type categorical
-INFO:lightwood-53258:Starting statistical analysis
-INFO:lightwood-53258:Finished statistical analysis
-
-
-

Let’s take a look at our JSON-AI and print to file.

-
-
[4]:
-
-
-
-print(json_ai.to_json())
-
-
-
-
-
-
-
-
-{
-    "features": {
-        "model": {
-            "encoder": {
-                "module": "Categorical.OneHotEncoder",
-                "args": {}
-            },
-            "data_dtype": "categorical"
-        },
-        "year": {
-            "encoder": {
-                "module": "Integer.NumericEncoder",
-                "args": {}
-            },
-            "data_dtype": "integer"
-        },
-        "transmission": {
-            "encoder": {
-                "module": "Categorical.OneHotEncoder",
-                "args": {}
-            },
-            "data_dtype": "categorical"
-        },
-        "mileage": {
-            "encoder": {
-                "module": "Integer.NumericEncoder",
-                "args": {}
-            },
-            "data_dtype": "integer"
-        },
-        "fuelType": {
-            "encoder": {
-                "module": "Categorical.OneHotEncoder",
-                "args": {}
-            },
-            "data_dtype": "categorical"
-        },
-        "tax": {
-            "encoder": {
-                "module": "Integer.NumericEncoder",
-                "args": {}
-            },
-            "data_dtype": "integer"
-        },
-        "mpg": {
-            "encoder": {
-                "module": "Float.NumericEncoder",
-                "args": {}
-            },
-            "data_dtype": "float"
-        },
-        "engineSize": {
-            "encoder": {
-                "module": "Float.NumericEncoder",
-                "args": {}
-            },
-            "data_dtype": "float"
-        }
-    },
-    "outputs": {
-        "price": {
-            "data_dtype": "integer",
-            "encoder": {
-                "module": "Integer.NumericEncoder",
-                "args": {
-                    "is_target": "True",
-                    "positive_domain": "$statistical_analysis.positive_domain"
-                }
-            },
-            "mixers": [
-                {
-                    "module": "Neural",
-                    "args": {
-                        "fit_on_dev": true,
-                        "stop_after": "$problem_definition.seconds_per_mixer",
-                        "search_hyperparameters": true
-                    }
-                },
-                {
-                    "module": "LightGBM",
-                    "args": {
-                        "stop_after": "$problem_definition.seconds_per_mixer",
-                        "fit_on_dev": true
-                    }
-                },
-                {
-                    "module": "Regression",
-                    "args": {
-                        "stop_after": "$problem_definition.seconds_per_mixer"
-                    }
-                }
-            ],
-            "ensemble": {
-                "module": "BestOf",
-                "args": {
-                    "args": "$pred_args",
-                    "accuracy_functions": "$accuracy_functions",
-                    "ts_analysis": null
-                }
-            }
-        }
-    },
-    "problem_definition": {
-        "target": "price",
-        "pct_invalid": 2,
-        "unbias_target": true,
-        "seconds_per_mixer": 3011,
-        "seconds_per_encoder": 0,
-        "time_aim": 13552.040324918955,
-        "target_weights": null,
-        "positive_domain": false,
-        "timeseries_settings": {
-            "is_timeseries": false,
-            "order_by": null,
-            "window": null,
-            "group_by": null,
-            "use_previous_target": true,
-            "nr_predictions": null,
-            "historical_columns": null,
-            "target_type": "",
-            "allow_incomplete_history": false
-        },
-        "anomaly_detection": true,
-        "ignore_features": [],
-        "fit_on_all": true,
-        "strict_mode": true,
-        "seed_nr": 420
-    },
-    "identifiers": {},
-    "accuracy_functions": [
-        "r2_score"
-    ]
-}
-
-
-
-
-

3) Create your custom encoder (LabelEncoder).

-

Once our JSON-AI is filled, let’s make our LabelEncoder. All Lightwood encoders inherit from the BaseEncoder class, found here.

-

BaseEncoder

-

The BaseEncoder has 5 expected calls:

-
    -
  • __init__: instantiate the encoder

  • -
  • prepare: Train or create the rules of the encoder

  • -
  • encode: Given data, convert to the featurized representation

  • -
  • decode: Given featurized representations, revert back to data

  • -
  • to: Use CPU/GPU (mostly important for learned representations)

  • -
-

From above, we see that “model”, “transmission”, and “fuelType” are all categorical columns. These will be the ones we want to modify.

-
-

LabelEncoder

-

The LabelEncoder should satisfy a couple of rules

-
    -
  1. For the __init__ call:

  2. -
-
    -
  • Specify the only argument is_target; this asks whether the encoder aims to represent the target column.

  • -
  • Set is_prepared=False in the initialization. All encoders are prepared using their prepare() call, which turns this flag on to True if preparation of the encoders is successful.

  • -
  • Set output_size=1; the output size refers to how many options the represented encoder may adopt.

  • -
-
    -
  1. For the prepare call:

  2. -
-
    -
  • Specify the only argument priming_data; this provides the pd.Series of the data column for the encoder.

  • -
  • Find all unique categories in the column data

  • -
  • Make a dictionary representing label number to category (reserves 0 as Unknown) and the inverse dictionary

  • -
  • Set is_prepared=True

  • -
-
    -
  1. The encode() call will convert each data point’s category name into the encoded label.

  2. -
  3. The decode() call will convert a previously encoded label into the original category name.

  4. -
-

Given this approach only uses simple dictionaries, there is no need for a dedicated to() call (although this would inherit BaseEncoder’s implementation).

-

This implementation would look as follows:

-
"""
-2021.10.13
-
-Create a LabelEncoder that transforms categorical data into a label.
-"""
-import pandas as pd
-import torch
-
-from lightwood.encoder import BaseEncoder
-from typing import List, Union
-from lightwood.helpers.log import log
-
-
-class LabelEncoder(BaseEncoder):
-    """
-    Create a label representation for categorical data. The data will rely on sorted to organize the order of the labels.
-
-    Class Attributes:
-    - is_target: Whether this is used to encode the target
-    - is_prepared: Whether the encoder rules have been set (after ``prepare`` is called)
-
-    """  # noqa
-
-    is_target: bool
-    is_prepared: bool
-
-    is_timeseries_encoder: bool = False
-    is_trainable_encoder: bool = False
-
-    def __init__(self, is_target: bool = False) -> None:
-        """
-        Initialize the Label Encoder
-
-        :param is_target:
-        """
-        self.is_target = is_target
-        self.is_prepared = False
-
-        # Size of the output encoded dimension per data point
-        # For LabelEncoder, this is always 1 (1 label per category)
-        self.output_size = 1
-
-    # Not all encoders need to be prepared
-    def prepare(self, priming_data: pd.Series) -> None:
-        """
-        Create a LabelEncoder for categorical data.
-
-        LabelDict creates a mapping where each index is associated to a category.
-
-        :param priming_data: Input column data that is categorical.
-
-        :returns: Nothing; prepares encoder rules with `label_dict` and `ilabel_dict`
-        """
-
-        # Find all unique categories in the dataset
-        categories = priming_data.unique()
-
-        log.info("Categories Detected = " + str(self.output_size))
-
-        # Create the Category labeller
-        self.label_dict = {"Unknown": 0}  # Include an unknown category
-        self.label_dict.update({cat: idx + 1 for idx, cat in enumerate(categories)})
-        self.ilabel_dict = {idx: cat for cat, idx in self.label_dict.items()}
-
-        self.is_prepared = True
-
-    def encode(self, column_data: Union[pd.Series, list]) -> torch.Tensor:
-        """
-        Convert pre-processed data into the labeled values
-
-        :param column_data: Pandas series to convert into labels
-        """
-        if isinstance(column_data, pd.Series):
-            enc = column_data.apply(lambda x: self.label_dict.get(x, 0)).tolist()
-        else:
-            enc = [self.label_dict.get(x, 0) for x in column_data]
-
-        return torch.Tensor(enc).int().unsqueeze(1)
-
-    def decode(self, encoded_data: torch.Tensor) -> List[object]:
-        """
-        Convert torch.Tensor labels into categorical data
-
-        :param encoded_data: Encoded data in the form of a torch.Tensor
-        """
-        return [self.ilabel_dict[i.item()] for i in encoded_data]
-
-
-

Some additional notes: (1) The encode() call should be able to intake a list of values, it is optional to make it compatible with pd.Series or pd.DataFrame (2) The output of encode() must be a torch tensor with dimensionality \(N_{rows} x N_{output}\).

-

Now that the LabelEncoder is complete, move this to ~/lightwood_modules and we’re ready to try this out!

-
-
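Before wiring the encoder into a pipeline, a quick standalone sanity check can be helpful. The snippet below is only a sketch: it assumes the class above was saved as LabelEncoder.py somewhere importable, and the "Electric" value is made up to show the Unknown fallback:

```python
import pandas as pd
from LabelEncoder import LabelEncoder  # assumes LabelEncoder.py is on the Python path

enc = LabelEncoder()
enc.prepare(pd.Series(["Diesel", "Petrol", "Diesel", "Hybrid"]))

encoded = enc.encode(["Petrol", "Diesel", "Electric"])  # unseen category -> label 0
print(encoded.shape)        # torch.Size([3, 1])
print(enc.decode(encoded))  # ['Petrol', 'Diesel', 'Unknown']
```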
-
-

4) Edit JSON-AI

-

Now that we have our LabelEncoder script, we have two ways of introducing this encoder:

-
    -
  1. Change all categorical columns to our encoder of choice

  2. -
  3. Replace the default encoder (Categorical.OneHotEncoder) for categorical data to our encoder of choice

  4. -
-

In the first scenario, we may not want to change ALL columns. By switching the encoder on a Feature level, Lightwood allows you to control how representations for a given feature are handled. However, suppose you want to replace an approach entirely with your own methodology - Lightwood supports overriding default methods to control how you want to treat a data type as well.

-

Below, we’ll show both strategies:

-

The first strategy requires just specifying which features you’d like to change. Once you have your list, you can manually set the encoder “module” to the class you’d like. This is best suited for a few columns or if you only want to override a few particular columns as opposed to replacing the ``Encoder`` behavior for an entire data type. #### Strategy 1: Change the encoders for the features directly

-
for ft in ["model", "transmission", "fuelType"]: # Features you want to replace
-    # Set each feature to the custom encoder
-    json_ai.features[ft].encoder['module'] = 'LabelEncoder.LabelEncoder'
-
-
-

Suppose you have many columns that are categorical- you may want to enforce your approach explicitly without naming each column. This can be done by examining the data_dtype of JSON-AI’s features. For all features that are type categorical (while this is a str, it’s ideal to import dtype and explicitly check the data type), replace the default Encoder with your encoder. In this case, this is LabelEncoder.LabelEncoder. #### Strategy 2: Programatically change all encoder -assignments for a data type

-
from lightwood.api import dtype
-for i in json_ai.features:
-    if json_ai.features[i].data_dtype == dtype.categorical:
-        json_ai.features[i].encoder['module'] = 'LabelEncoder.LabelEncoder'
-
-
-

We’ll go with the first approach for simplicity:

-
-
[5]:
-
-
-
-for ft in ["model", "transmission", "fuelType"]: # Features you want to replace
-    # Set each feature to the custom encoder
-    json_ai.features[ft].encoder['module'] = 'LabelEncoder.LabelEncoder'
-
-
-
-
-
-

5) Generate code and your predictor from JSON-AI

-

Now, let’s use this JSON-AI object to generate code and make a predictor. This can be done in 2 simple lines, below:

-
-
[6]:
-
-
-
-#Generate python code that fills in your pipeline
-code = code_from_json_ai(json_ai)
-
-# Turn the code above into a predictor object
-predictor = predictor_from_code(code)
-
-
-
-
-
-
-
-
-INFO:lightwood-53258:Unable to import black formatter, predictor code might be a bit ugly.
-
-
-

Now, let’s run our pipeline. To do so, let’s first:

-
    -
  1. Perform a statistical analysis on the data (this is important in preparing Encoders/Mixers as it populates the StatisticalAnalysis attribute with details some encoders need).

  2. -
  3. Clean our data

  4. -
  5. Prepare the encoders

  6. -
  7. Featurize the data

  8. -
-
-
[7]:
-
-
-
-# Perform Stats Analysis
-predictor.analyze_data(df)
-
-# Pre-process the data
-cleaned_data = predictor.preprocess(data=df)
-
-# Create a train/test split
-split_data = predictor.split(cleaned_data)
-
-# Prepare the encoders
-predictor.prepare(split_data)
-
-# Featurize the data
-ft_data = predictor.featurize(split_data)
-
-
-
-
-
-
-
-
-INFO:lightwood-53258:Performing statistical analysis on data
-INFO:lightwood-53258:Starting statistical analysis
-INFO:lightwood-53258:Finished statistical analysis
-INFO:lightwood-53258:Cleaning the data
-INFO:lightwood-53258:Splitting the data into train/test
-INFO:lightwood-53258:Preparing the encoders
-INFO:lightwood-53258:Encoder prepping dict length of: 1
-INFO:lightwood-53258:Encoder prepping dict length of: 2
-INFO:lightwood-53258:Encoder prepping dict length of: 3
-INFO:lightwood-53258:Encoder prepping dict length of: 4
-INFO:lightwood-53258:Encoder prepping dict length of: 5
-INFO:lightwood-53258:Encoder prepping dict length of: 6
-INFO:lightwood-53258:Encoder prepping dict length of: 7
-INFO:lightwood-53258:Encoder prepping dict length of: 8
-INFO:lightwood-53258:Encoder prepping dict length of: 9
-INFO:lightwood-53258:Categories Detected = 1
-INFO:lightwood-53258:Categories Detected = 1
-INFO:lightwood-53258:Categories Detected = 1
-INFO:lightwood-53258:Done running for: price
-INFO:lightwood-53258:Done running for: model
-INFO:lightwood-53258:Done running for: year
-INFO:lightwood-53258:Done running for: transmission
-INFO:lightwood-53258:Done running for: mileage
-INFO:lightwood-53258:Done running for: fuelType
-INFO:lightwood-53258:Done running for: tax
-INFO:lightwood-53258:Done running for: mpg
-INFO:lightwood-53258:Done running for: engineSize
-INFO:lightwood-53258:Featurizing the data
-
-
-

The splitter creates 3 data-splits, a “train”, “dev”, and “test” set. The featurize command from the predictor allows us to convert the cleaned data into features. We can access this as follows:

-
-
[8]:
-
-
-
-# Pick a categorical column name
-col_name = "fuelType"
-
-# Get the encoded feature data
-enc_ft = ft_data["train"].get_encoded_column_data(col_name).squeeze(1) #torch tensor (N_rows x N_output_dim)
-
-# Get the original data from the dataset
-orig_data = ft_data["train"].get_column_original_data(col_name) #pandas dataframe
-
-# Create a pandas data frame to compare encoded data and original data
-compare_data = pd.concat([orig_data, pd.Series(enc_ft, name="EncData")], axis=1)
-compare_data.head()
-
-
-
-
-
[8]:
-
-
-
-
- - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - -
fuelTypeEncData
0Diesel1
1Diesel1
2Diesel1
3Petrol2
4Diesel1
-
-
-

We can see what the label mapping is by inspecting our encoders as follows:

-
-
[9]:
-
-
-
-# Label Name -> Label Number
-print(predictor.encoders[col_name].label_dict)
-
-
-
-
-
-
-
-
-{'Unknown': 0, 'Diesel': 1, 'Petrol': 2, 'Hybrid': 3}
-
-
-

For each category above, the number associated in the dictionary is the label for each category. This means “Diesel” is always represented by a 1, etc.

-

With that, you’ve created your own custom Encoder that uses a rule-based approach! Please checkout more tutorials for other custom approach guides.

-
- - -
- -
-
- -
- -
-

- © Copyright 2017-2021, MindsDB. - -

-
- - - - Built with Sphinx using a - - theme - - provided by Read the Docs. - -
-
-
- -
- -
- - - - - - - - - - - \ No newline at end of file diff --git a/docs/tutorials/custom_encoder_rulebased/custom_encoder_rulebased.ipynb b/docs/tutorials/custom_encoder_rulebased/custom_encoder_rulebased.ipynb deleted file mode 100644 index b708714d7..000000000 --- a/docs/tutorials/custom_encoder_rulebased/custom_encoder_rulebased.ipynb +++ /dev/null @@ -1,887 +0,0 @@ -{ - "cells": [ - { - "cell_type": "markdown", - "id": "smooth-philip", - "metadata": {}, - "source": [ - "### Custom Encoder: Rule-Based\n", - "\n", - "Lightwood uses \"Encoders\" to convert preprocessed (cleaned) data into **features**. Encoders represent the **feature engineering** step of the data science pipeline; they can either have a set of instructions (\"rule-based\") or a learned representation (trained on data).\n", - "\n", - "In the following notebook, we will experiment with creating a custom encoder that creates **Label Encoding**. \n", - "\n", - "For example, imagine we have the following set of categories:\n", - "\n", - "```\n", - "MyColumnData = [\"apple\", \"orange\", \"orange\", \"banana\", \"apple\", \"dragonfruit\"]\n", - "```\n", - "\n", - "There are 4 categories to consider: \"apple\", \"banana\", \"orange\", and \"dragonfruit\".\n", - "\n", - "**Label encoding** allows you to refer to these categories as if they were numbers. For example, consider the mapping (arranged alphabetically):\n", - "\n", - "1 - apple
\n", - "2 - banana
\n", - "3 - dragonfruit
\n", - "4 - orange
\n", - "\n", - "Using this mapping, we can convert the above data as follows:\n", - "\n", - "```\n", - "MyFeatureData = [1, 4, 4, 2, 1, 3]\n", - "```\n", - "\n", - "In the following notebook, we will design a **LabelEncoder** for Lightwood for use on categorical data. We will be using the Kaggle \"Used Car\" [dataset](https://www.kaggle.com/adityadesai13/used-car-dataset-ford-and-mercedes). We've provided a link for you to automatically access this CSV. This dataset describes various details of cars on sale - with the goal of predicting how much this car may sell for.\n", - "\n", - "Let's get started." - ] - }, - { - "cell_type": "code", - "execution_count": 1, - "id": "raising-adventure", - "metadata": {}, - "outputs": [], - "source": [ - "import pandas as pd\n", - "\n", - "# Lightwood modules\n", - "import lightwood as lw\n", - "from lightwood import ProblemDefinition, \\\n", - " JsonAI, \\\n", - " json_ai_from_problem, \\\n", - " code_from_json_ai, \\\n", - " predictor_from_code" - ] - }, - { - "cell_type": "markdown", - "id": "instant-income", - "metadata": {}, - "source": [ - "### 1) Load your data\n", - "\n", - "Lightwood works with `pandas.DataFrame`s; load data via pandas as follows:" - ] - }, - { - "cell_type": "code", - "execution_count": 2, - "id": "technical-government", - "metadata": {}, - "outputs": [ - { - "data": { - "text/html": [ - "
\n", - "\n", - "\n", - " \n", - " \n", - " \n", - " \n", - " \n", - " \n", - " \n", - " \n", - " \n", - " \n", - " \n", - " \n", - " \n", - " \n", - " \n", - " \n", - " \n", - " \n", - " \n", - " \n", - " \n", - " \n", - " \n", - " \n", - " \n", - " \n", - " \n", - " \n", - " \n", - " \n", - " \n", - " \n", - " \n", - " \n", - " \n", - " \n", - " \n", - " \n", - " \n", - " \n", - " \n", - " \n", - " \n", - " \n", - " \n", - " \n", - " \n", - " \n", - " \n", - " \n", - " \n", - " \n", - " \n", - " \n", - " \n", - " \n", - " \n", - " \n", - " \n", - " \n", - " \n", - " \n", - " \n", - " \n", - " \n", - " \n", - " \n", - " \n", - " \n", - " \n", - " \n", - " \n", - " \n", - " \n", - " \n", - " \n", - "
modelyearpricetransmissionmileagefuelTypetaxmpgengineSize
0A1201712500Manual15735Petrol15055.41.4
1A6201616500Automatic36203Diesel2064.22.0
2A1201611000Manual29946Petrol3055.41.4
3A4201716800Automatic25952Diesel14567.32.0
4A3201917300Manual1998Petrol14549.61.0
\n", - "
" - ], - "text/plain": [ - " model year price transmission mileage fuelType tax mpg engineSize\n", - "0 A1 2017 12500 Manual 15735 Petrol 150 55.4 1.4\n", - "1 A6 2016 16500 Automatic 36203 Diesel 20 64.2 2.0\n", - "2 A1 2016 11000 Manual 29946 Petrol 30 55.4 1.4\n", - "3 A4 2017 16800 Automatic 25952 Diesel 145 67.3 2.0\n", - "4 A3 2019 17300 Manual 1998 Petrol 145 49.6 1.0" - ] - }, - "execution_count": 2, - "metadata": {}, - "output_type": "execute_result" - } - ], - "source": [ - "filename = 'https://raw.githubusercontent.com/mindsdb/benchmarks/main/benchmarks/datasets/used_car_price/data.csv'\n", - "df = pd.read_csv(filename)\n", - "df.head()" - ] - }, - { - "cell_type": "markdown", - "id": "anonymous-rainbow", - "metadata": {}, - "source": [ - "We can see a handful of columns above, such as `model, year, price, transmission, mileage, fuelType, tax, mpg, engineSize`. Some columns are numerical whereas others are categorical. We are going to specifically only focus on categorical columns.\n", - "\n", - "\n", - "### 2) Generate JSON-AI Syntax\n", - "\n", - "We will make a `LabelEncoder` as follows:\n", - "\n", - "(1) Find all unique examples within a column
\n", - "(2) Order the examples in a consistent way
\n", - "(3) Label (python-index of 0 as start) each category
\n", - "(4) Assign the label according to each datapoint.
\n", - "\n", - "First, let's generate a JSON-AI syntax so we can automatically identify each column. " - ] - }, - { - "cell_type": "code", - "execution_count": 3, - "id": "absent-maker", - "metadata": {}, - "outputs": [ - { - "name": "stderr", - "output_type": "stream", - "text": [ - "\u001b[32mINFO:lightwood-53258:Dropping features: []\u001b[0m\n", - "\u001b[32mINFO:lightwood-53258:Analyzing a sample of 6920\u001b[0m\n", - "\u001b[32mINFO:lightwood-53258:from a total population of 10668, this is equivalent to 64.9% of your data.\u001b[0m\n", - "\u001b[32mINFO:lightwood-53258:Using 15 processes to deduct types.\u001b[0m\n", - "\u001b[32mINFO:lightwood-53258:Infering type for: year\u001b[0m\n", - "\u001b[32mINFO:lightwood-53258:Infering type for: model\u001b[0m\n", - "\u001b[32mINFO:lightwood-53258:Infering type for: price\u001b[0m\n", - "\u001b[32mINFO:lightwood-53258:Infering type for: mileage\u001b[0m\n", - "\u001b[32mINFO:lightwood-53258:Infering type for: transmission\u001b[0m\n", - "\u001b[32mINFO:lightwood-53258:Infering type for: fuelType\u001b[0m\n", - "\u001b[32mINFO:lightwood-53258:Infering type for: tax\u001b[0m\n", - "\u001b[32mINFO:lightwood-53258:Infering type for: mpg\u001b[0m\n", - "\u001b[32mINFO:lightwood-53258:Infering type for: engineSize\u001b[0m\n", - "\u001b[32mINFO:lightwood-53258:Column year has data type integer\u001b[0m\n", - "\u001b[32mINFO:lightwood-53258:Column tax has data type integer\u001b[0m\n", - "\u001b[32mINFO:lightwood-53258:Column price has data type integer\u001b[0m\n", - "\u001b[32mINFO:lightwood-53258:Column mileage has data type integer\u001b[0m\n", - "\u001b[32mINFO:lightwood-53258:Column engineSize has data type float\u001b[0m\n", - "\u001b[32mINFO:lightwood-53258:Column mpg has data type float\u001b[0m\n", - "\u001b[32mINFO:lightwood-53258:Column fuelType has data type categorical\u001b[0m\n", - "\u001b[32mINFO:lightwood-53258:Column transmission has data type categorical\u001b[0m\n", - "\u001b[32mINFO:lightwood-53258:Column model has data type categorical\u001b[0m\n", - "\u001b[32mINFO:lightwood-53258:Starting statistical analysis\u001b[0m\n", - "\u001b[32mINFO:lightwood-53258:Finished statistical analysis\u001b[0m\n" - ] - } - ], - "source": [ - "# Create the Problem Definition\n", - "pdef = ProblemDefinition.from_dict({\n", - " 'target': 'price', # column you want to predict\n", - " #'ignore_features': ['year', 'mileage', 'tax', 'mpg', 'engineSize']\n", - "})\n", - "\n", - "# Generate a JSON-AI object\n", - "json_ai = json_ai_from_problem(df, problem_definition=pdef)" - ] - }, - { - "cell_type": "markdown", - "id": "swedish-riverside", - "metadata": {}, - "source": [ - "Let's take a look at our JSON-AI and print to file." 
- ] - }, - { - "cell_type": "code", - "execution_count": 4, - "id": "coastal-paragraph", - "metadata": {}, - "outputs": [ - { - "name": "stdout", - "output_type": "stream", - "text": [ - "{\n", - " \"features\": {\n", - " \"model\": {\n", - " \"encoder\": {\n", - " \"module\": \"Categorical.OneHotEncoder\",\n", - " \"args\": {}\n", - " },\n", - " \"data_dtype\": \"categorical\"\n", - " },\n", - " \"year\": {\n", - " \"encoder\": {\n", - " \"module\": \"Integer.NumericEncoder\",\n", - " \"args\": {}\n", - " },\n", - " \"data_dtype\": \"integer\"\n", - " },\n", - " \"transmission\": {\n", - " \"encoder\": {\n", - " \"module\": \"Categorical.OneHotEncoder\",\n", - " \"args\": {}\n", - " },\n", - " \"data_dtype\": \"categorical\"\n", - " },\n", - " \"mileage\": {\n", - " \"encoder\": {\n", - " \"module\": \"Integer.NumericEncoder\",\n", - " \"args\": {}\n", - " },\n", - " \"data_dtype\": \"integer\"\n", - " },\n", - " \"fuelType\": {\n", - " \"encoder\": {\n", - " \"module\": \"Categorical.OneHotEncoder\",\n", - " \"args\": {}\n", - " },\n", - " \"data_dtype\": \"categorical\"\n", - " },\n", - " \"tax\": {\n", - " \"encoder\": {\n", - " \"module\": \"Integer.NumericEncoder\",\n", - " \"args\": {}\n", - " },\n", - " \"data_dtype\": \"integer\"\n", - " },\n", - " \"mpg\": {\n", - " \"encoder\": {\n", - " \"module\": \"Float.NumericEncoder\",\n", - " \"args\": {}\n", - " },\n", - " \"data_dtype\": \"float\"\n", - " },\n", - " \"engineSize\": {\n", - " \"encoder\": {\n", - " \"module\": \"Float.NumericEncoder\",\n", - " \"args\": {}\n", - " },\n", - " \"data_dtype\": \"float\"\n", - " }\n", - " },\n", - " \"outputs\": {\n", - " \"price\": {\n", - " \"data_dtype\": \"integer\",\n", - " \"encoder\": {\n", - " \"module\": \"Integer.NumericEncoder\",\n", - " \"args\": {\n", - " \"is_target\": \"True\",\n", - " \"positive_domain\": \"$statistical_analysis.positive_domain\"\n", - " }\n", - " },\n", - " \"mixers\": [\n", - " {\n", - " \"module\": \"Neural\",\n", - " \"args\": {\n", - " \"fit_on_dev\": true,\n", - " \"stop_after\": \"$problem_definition.seconds_per_mixer\",\n", - " \"search_hyperparameters\": true\n", - " }\n", - " },\n", - " {\n", - " \"module\": \"LightGBM\",\n", - " \"args\": {\n", - " \"stop_after\": \"$problem_definition.seconds_per_mixer\",\n", - " \"fit_on_dev\": true\n", - " }\n", - " },\n", - " {\n", - " \"module\": \"Regression\",\n", - " \"args\": {\n", - " \"stop_after\": \"$problem_definition.seconds_per_mixer\"\n", - " }\n", - " }\n", - " ],\n", - " \"ensemble\": {\n", - " \"module\": \"BestOf\",\n", - " \"args\": {\n", - " \"args\": \"$pred_args\",\n", - " \"accuracy_functions\": \"$accuracy_functions\",\n", - " \"ts_analysis\": null\n", - " }\n", - " }\n", - " }\n", - " },\n", - " \"problem_definition\": {\n", - " \"target\": \"price\",\n", - " \"pct_invalid\": 2,\n", - " \"unbias_target\": true,\n", - " \"seconds_per_mixer\": 3011,\n", - " \"seconds_per_encoder\": 0,\n", - " \"time_aim\": 13552.040324918955,\n", - " \"target_weights\": null,\n", - " \"positive_domain\": false,\n", - " \"timeseries_settings\": {\n", - " \"is_timeseries\": false,\n", - " \"order_by\": null,\n", - " \"window\": null,\n", - " \"group_by\": null,\n", - " \"use_previous_target\": true,\n", - " \"nr_predictions\": null,\n", - " \"historical_columns\": null,\n", - " \"target_type\": \"\",\n", - " \"allow_incomplete_history\": false\n", - " },\n", - " \"anomaly_detection\": true,\n", - " \"ignore_features\": [],\n", - " \"fit_on_all\": true,\n", - " \"strict_mode\": true,\n", - " \"seed_nr\": 
420\n", - " },\n", - " \"identifiers\": {},\n", - " \"accuracy_functions\": [\n", - " \"r2_score\"\n", - " ]\n", - "}\n" - ] - } - ], - "source": [ - "print(json_ai.to_json())" - ] - }, - { - "cell_type": "markdown", - "id": "expired-flour", - "metadata": {}, - "source": [ - "### 3) Create your custom encoder (`LabelEncoder`).\n", - "\n", - "Once our JSON-AI is filled, let's make our LabelEncoder. All Lightwood encoders inherit from the `BaseEncoder` class, found [here](https://github.com/mindsdb/lightwood/blob/staging/lightwood/encoder/base.py). \n", - "\n", - "![BaseEncoder](baseencoder.png)\n", - "\n", - "\n", - "The `BaseEncoder` has 5 expected calls:\n", - "\n", - "- `__init__`: instantiate the encoder\n", - "- `prepare`: Train or create the rules of the encoder\n", - "- `encode`: Given data, convert to the featurized representation\n", - "- `decode`: Given featurized representations, revert back to data\n", - "- `to`: Use CPU/GPU (mostly important for learned representations)\n", - "\n", - "From above, we see that \"model\", \"transmission\", and \"fuelType\" are all categorical columns. These will be the ones we want to modify." - ] - }, - { - "cell_type": "markdown", - "id": "verbal-northwest", - "metadata": {}, - "source": [ - "##### `LabelEncoder`\n", - "\n", - "The `LabelEncoder` should satisfy a couple of rules\n", - "\n", - "(1) For the ``__init__`` call:
\n", - " - Specify the only argument `is_target`; this asks whether the encoder aims to represent the target column.
\n", - " - Set `is_prepared=False` in the initialization. All encoders are prepared using their `prepare()` call, which turns this flag on to `True` if preparation of the encoders is successful.
\n", - " - Set `output_size=1`; the output size refers to how many options the represented encoder may adopt. \n", - " \n", - " \n", - "(2) For the ``prepare`` call:\n", - " - Specify the only argument `priming_data`; this provides the `pd.Series` of the data column for the encoder.\n", - " - Find all unique categories in the column data\n", - " - Make a dictionary representing label number to category (reserves 0 as Unknown) and the inverse dictionary\n", - " - Set `is_prepared=True`\n", - " \n", - "(3) The `encode()` call will convert each data point's category name into the encoded label.\n", - "\n", - "(4) The `decode()` call will convert a previously encoded label into the original category name.\n", - "\n", - "Given this approach only uses simple dictionaries, there is no need for a dedicated `to()` call (although this would inherit `BaseEncoder`'s implementation).\n", - "\n", - "This implementation would look as follows:" - ] - }, - { - "cell_type": "markdown", - "id": "approximate-situation", - "metadata": {}, - "source": [ - "```python\n", - "\"\"\"\n", - "2021.10.13\n", - "\n", - "Create a LabelEncoder that transforms categorical data into a label.\n", - "\"\"\"\n", - "import pandas as pd\n", - "import torch\n", - "\n", - "from lightwood.encoder import BaseEncoder\n", - "from typing import List, Union\n", - "from lightwood.helpers.log import log\n", - "\n", - "\n", - "class LabelEncoder(BaseEncoder):\n", - " \"\"\"\n", - " Create a label representation for categorical data. The data will rely on sorted to organize the order of the labels.\n", - "\n", - " Class Attributes:\n", - " - is_target: Whether this is used to encode the target\n", - " - is_prepared: Whether the encoder rules have been set (after ``prepare`` is called)\n", - "\n", - " \"\"\" # noqa\n", - "\n", - " is_target: bool\n", - " is_prepared: bool\n", - "\n", - " is_timeseries_encoder: bool = False\n", - " is_trainable_encoder: bool = False\n", - "\n", - " def __init__(self, is_target: bool = False) -> None:\n", - " \"\"\"\n", - " Initialize the Label Encoder\n", - "\n", - " :param is_target:\n", - " \"\"\"\n", - " self.is_target = is_target\n", - " self.is_prepared = False\n", - "\n", - " # Size of the output encoded dimension per data point\n", - " # For LabelEncoder, this is always 1 (1 label per category)\n", - " self.output_size = 1\n", - "\n", - " # Not all encoders need to be prepared\n", - " def prepare(self, priming_data: pd.Series) -> None:\n", - " \"\"\"\n", - " Create a LabelEncoder for categorical data.\n", - "\n", - " LabelDict creates a mapping where each index is associated to a category.\n", - "\n", - " :param priming_data: Input column data that is categorical.\n", - "\n", - " :returns: Nothing; prepares encoder rules with `label_dict` and `ilabel_dict`\n", - " \"\"\"\n", - "\n", - " # Find all unique categories in the dataset\n", - " categories = priming_data.unique()\n", - "\n", - " log.info(\"Categories Detected = \" + str(self.output_size))\n", - "\n", - " # Create the Category labeller\n", - " self.label_dict = {\"Unknown\": 0} # Include an unknown category\n", - " self.label_dict.update({cat: idx + 1 for idx, cat in enumerate(categories)})\n", - " self.ilabel_dict = {idx: cat for cat, idx in self.label_dict.items()}\n", - "\n", - " self.is_prepared = True\n", - "\n", - " def encode(self, column_data: Union[pd.Series, list]) -> torch.Tensor:\n", - " \"\"\"\n", - " Convert pre-processed data into the labeled values\n", - "\n", - " :param column_data: Pandas series to convert into labels\n", - " 
\"\"\"\n", - " if isinstance(column_data, pd.Series):\n", - " enc = column_data.apply(lambda x: self.label_dict.get(x, 0)).tolist()\n", - " else:\n", - " enc = [self.label_dict.get(x, 0) for x in column_data]\n", - "\n", - " return torch.Tensor(enc).int().unsqueeze(1)\n", - "\n", - " def decode(self, encoded_data: torch.Tensor) -> List[object]:\n", - " \"\"\"\n", - " Convert torch.Tensor labels into categorical data\n", - "\n", - " :param encoded_data: Encoded data in the form of a torch.Tensor\n", - " \"\"\"\n", - " return [self.ilabel_dict[i.item()] for i in encoded_data]\n", - "```" - ] - }, - { - "cell_type": "markdown", - "id": "optical-archive", - "metadata": {}, - "source": [ - "Some additional notes:\n", - "(1) The `encode()` call should be able to intake a list of values, it is optional to make it compatible with `pd.Series` or `pd.DataFrame`
\n", - "(2) The output of `encode()` must be a torch tensor with dimensionality $N_{rows} x N_{output}$.\n", - "\n", - "Now that the `LabelEncoder` is complete, move this to `~/lightwood_modules` and we're ready to try this out!\n", - "\n", - "### 4) Edit JSON-AI\n", - "\n", - "Now that we have our `LabelEncoder` script, we have two ways of introducing this encoder:\n", - "\n", - "(1) Change all categorical columns to our encoder of choice
\n", - "(2) Replace the default encoder (`Categorical.OneHotEncoder`) for categorical data to our encoder of choice
\n", - "\n", - "In the first scenario, we may not want to change ALL columns. By switching the encoder on a `Feature` level, Lightwood allows you to control how representations for a given feature are handled. However, suppose you want to replace an approach entirely with your own methodology - Lightwood supports overriding default methods to control how you want to treat a *data type* as well.\n", - "\n", - "Below, we'll show both strategies:" - ] - }, - { - "cell_type": "markdown", - "id": "quiet-lodging", - "metadata": {}, - "source": [ - "The first strategy requires just specifying which features you'd like to change. Once you have your list, you can manually set the encoder \"module\" to the class you'd like. **This is best suited for a few columns or if you only want to override a few particular columns as opposed to replacing the `Encoder` behavior for an entire data type**.\n", - "#### Strategy 1: Change the encoders for the features directly\n", - "```python\n", - "for ft in [\"model\", \"transmission\", \"fuelType\"]: # Features you want to replace\n", - " # Set each feature to the custom encoder\n", - " json_ai.features[ft].encoder['module'] = 'LabelEncoder.LabelEncoder'\n", - "```\n", - "\n", - "\n", - "Suppose you have many columns that are categorical- you may want to enforce your approach explicitly without naming each column. This can be done by examining the `data_dtype` of JSON-AI's features. For all features that are type `categorical` (while this is a `str`, it's ideal to import dtype and explicitly check the data type), replace the default `Encoder` with your encoder. In this case, this is `LabelEncoder.LabelEncoder`.\n", - "#### Strategy 2: Programatically change *all* encoder assignments for a data type\n", - "\n", - "```python\n", - "from lightwood.api import dtype\n", - "for i in json_ai.features:\n", - " if json_ai.features[i].data_dtype == dtype.categorical:\n", - " json_ai.features[i].encoder['module'] = 'LabelEncoder.LabelEncoder'\n", - "```\n", - "\n", - "We'll go with the first approach for simplicity:" - ] - }, - { - "cell_type": "code", - "execution_count": 5, - "id": "elementary-fusion", - "metadata": {}, - "outputs": [], - "source": [ - "for ft in [\"model\", \"transmission\", \"fuelType\"]: # Features you want to replace\n", - " # Set each feature to the custom encoder\n", - " json_ai.features[ft].encoder['module'] = 'LabelEncoder.LabelEncoder'" - ] - }, - { - "cell_type": "markdown", - "id": "together-austria", - "metadata": {}, - "source": [ - "### 5) Generate code and your predictor from JSON-AI\n", - "\n", - "Now, let's use this JSON-AI object to generate code and make a predictor. This can be done in 2 simple lines, below:" - ] - }, - { - "cell_type": "code", - "execution_count": 6, - "id": "inappropriate-james", - "metadata": {}, - "outputs": [ - { - "name": "stderr", - "output_type": "stream", - "text": [ - "\u001b[32mINFO:lightwood-53258:Unable to import black formatter, predictor code might be a bit ugly.\u001b[0m\n" - ] - } - ], - "source": [ - "#Generate python code that fills in your pipeline\n", - "code = code_from_json_ai(json_ai)\n", - "\n", - "# Turn the code above into a predictor object\n", - "predictor = predictor_from_code(code)" - ] - }, - { - "cell_type": "markdown", - "id": "personalized-andorra", - "metadata": {}, - "source": [ - "Now, let's run our pipeline. 
To do so, let's first:\n", - "\n", - "(1) Perform a statistical analysis on the data (*this is important in preparing Encoders/Mixers as it populates the* `StatisticalAnalysis` *attribute with details some encoders need*).
\n", - "(2) Clean our data
\n", - "(3) Prepare the encoders
\n", - "(4) Featurize the data
" - ] - }, - { - "cell_type": "code", - "execution_count": 7, - "id": "palestinian-harvey", - "metadata": {}, - "outputs": [ - { - "name": "stderr", - "output_type": "stream", - "text": [ - "\u001b[32mINFO:lightwood-53258:Performing statistical analysis on data\u001b[0m\n", - "\u001b[32mINFO:lightwood-53258:Starting statistical analysis\u001b[0m\n", - "\u001b[32mINFO:lightwood-53258:Finished statistical analysis\u001b[0m\n", - "\u001b[32mINFO:lightwood-53258:Cleaning the data\u001b[0m\n", - "\u001b[32mINFO:lightwood-53258:Splitting the data into train/test\u001b[0m\n", - "\u001b[32mINFO:lightwood-53258:Preparing the encoders\u001b[0m\n", - "\u001b[32mINFO:lightwood-53258:Encoder prepping dict length of: 1\u001b[0m\n", - "\u001b[32mINFO:lightwood-53258:Encoder prepping dict length of: 2\u001b[0m\n", - "\u001b[32mINFO:lightwood-53258:Encoder prepping dict length of: 3\u001b[0m\n", - "\u001b[32mINFO:lightwood-53258:Encoder prepping dict length of: 4\u001b[0m\n", - "\u001b[32mINFO:lightwood-53258:Encoder prepping dict length of: 5\u001b[0m\n", - "\u001b[32mINFO:lightwood-53258:Encoder prepping dict length of: 6\u001b[0m\n", - "\u001b[32mINFO:lightwood-53258:Encoder prepping dict length of: 7\u001b[0m\n", - "\u001b[32mINFO:lightwood-53258:Encoder prepping dict length of: 8\u001b[0m\n", - "\u001b[32mINFO:lightwood-53258:Encoder prepping dict length of: 9\u001b[0m\n", - "\u001b[32mINFO:lightwood-53258:Categories Detected = 1\u001b[0m\n", - "\u001b[32mINFO:lightwood-53258:Categories Detected = 1\u001b[0m\n", - "\u001b[32mINFO:lightwood-53258:Categories Detected = 1\u001b[0m\n", - "\u001b[32mINFO:lightwood-53258:Done running for: price\u001b[0m\n", - "\u001b[32mINFO:lightwood-53258:Done running for: model\u001b[0m\n", - "\u001b[32mINFO:lightwood-53258:Done running for: year\u001b[0m\n", - "\u001b[32mINFO:lightwood-53258:Done running for: transmission\u001b[0m\n", - "\u001b[32mINFO:lightwood-53258:Done running for: mileage\u001b[0m\n", - "\u001b[32mINFO:lightwood-53258:Done running for: fuelType\u001b[0m\n", - "\u001b[32mINFO:lightwood-53258:Done running for: tax\u001b[0m\n", - "\u001b[32mINFO:lightwood-53258:Done running for: mpg\u001b[0m\n", - "\u001b[32mINFO:lightwood-53258:Done running for: engineSize\u001b[0m\n", - "\u001b[32mINFO:lightwood-53258:Featurizing the data\u001b[0m\n" - ] - } - ], - "source": [ - "# Perform Stats Analysis\n", - "predictor.analyze_data(df)\n", - "\n", - "# Pre-process the data\n", - "cleaned_data = predictor.preprocess(data=df)\n", - "\n", - "# Create a train/test split\n", - "split_data = predictor.split(cleaned_data)\n", - "\n", - "# Prepare the encoders \n", - "predictor.prepare(split_data)\n", - "\n", - "# Featurize the data\n", - "ft_data = predictor.featurize(split_data)" - ] - }, - { - "cell_type": "markdown", - "id": "ordered-beast", - "metadata": {}, - "source": [ - "The splitter creates 3 data-splits, a \"train\", \"dev\", and \"test\" set. The `featurize` command from the predictor allows us to convert the cleaned data into features. We can access this as follows:" - ] - }, - { - "cell_type": "code", - "execution_count": 8, - "id": "silent-dealing", - "metadata": {}, - "outputs": [ - { - "data": { - "text/html": [ - "
\n", - "\n", - "\n", - " \n", - " \n", - " \n", - " \n", - " \n", - " \n", - " \n", - " \n", - " \n", - " \n", - " \n", - " \n", - " \n", - " \n", - " \n", - " \n", - " \n", - " \n", - " \n", - " \n", - " \n", - " \n", - " \n", - " \n", - " \n", - " \n", - " \n", - " \n", - " \n", - " \n", - " \n", - " \n", - " \n", - " \n", - "
   fuelType  EncData
0    Diesel        1
1    Diesel        1
2    Diesel        1
3    Petrol        2
4    Diesel        1
\n", - "
" - ], - "text/plain": [ - " fuelType EncData\n", - "0 Diesel 1\n", - "1 Diesel 1\n", - "2 Diesel 1\n", - "3 Petrol 2\n", - "4 Diesel 1" - ] - }, - "execution_count": 8, - "metadata": {}, - "output_type": "execute_result" - } - ], - "source": [ - "# Pick a categorical column name\n", - "col_name = \"fuelType\"\n", - "\n", - "# Get the encoded feature data\n", - "enc_ft = ft_data[\"train\"].get_encoded_column_data(col_name).squeeze(1) #torch tensor (N_rows x N_output_dim)\n", - "\n", - "# Get the original data from the dataset\n", - "orig_data = ft_data[\"train\"].get_column_original_data(col_name) #pandas dataframe\n", - "\n", - "# Create a pandas data frame to compare encoded data and original data\n", - "compare_data = pd.concat([orig_data, pd.Series(enc_ft, name=\"EncData\")], axis=1)\n", - "compare_data.head()" - ] - }, - { - "cell_type": "markdown", - "id": "fatty-peoples", - "metadata": {}, - "source": [ - "We can see what the label mapping is by inspecting our encoders as follows:" - ] - }, - { - "cell_type": "code", - "execution_count": 9, - "id": "superior-mobility", - "metadata": {}, - "outputs": [ - { - "name": "stdout", - "output_type": "stream", - "text": [ - "{'Unknown': 0, 'Diesel': 1, 'Petrol': 2, 'Hybrid': 3}\n" - ] - } - ], - "source": [ - "# Label Name -> Label Number\n", - "print(predictor.encoders[col_name].label_dict)" - ] - }, - { - "cell_type": "markdown", - "id": "frequent-remedy", - "metadata": {}, - "source": [ - "For each category above, the number associated in the dictionary is the label for each category. This means \"Diesel\" is always represented by a 1, etc.\n", - "\n", - "With that, you've created your own custom Encoder that uses a rule-based approach! Please checkout more [tutorials](https://lightwood.io/tutorials.html) for other custom approach guides." - ] - } - ], - "metadata": { - "kernelspec": { - "display_name": "mdb", - "language": "python", - "name": "mdb" - }, - "language_info": { - "codemirror_mode": { - "name": "ipython", - "version": 3 - }, - "file_extension": ".py", - "mimetype": "text/x-python", - "name": "python", - "nbconvert_exporter": "python", - "pygments_lexer": "ipython3", - "version": "3.8.10" - } - }, - "nbformat": 4, - "nbformat_minor": 5 -} diff --git a/docs/tutorials/custom_explainer/custom_explainer.html b/docs/tutorials/custom_explainer/custom_explainer.html deleted file mode 100644 index 045155916..000000000 --- a/docs/tutorials/custom_explainer/custom_explainer.html +++ /dev/null @@ -1,966 +0,0 @@ - - - - - - - - - - Tutorial - Implementing a custom analysis block in Lightwood — lightwood 1.6.1 documentation - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - -

Tutorial - Implementing a custom analysis block in Lightwood

-
-

Introduction

-

As you might already know, Lightwood is designed to be a flexible machine learning (ML) library that is able to abstract and automate the entire ML pipeline. Crucially, it is also designed to be extended or modified very easily according to your needs, essentially offering the entire spectrum between fully automated AutoML and a lightweight wrapper for customized ML pipelines.

-

As such, we can identify several different customizable “phases” in the process. The relevant phase for this tutorial is the “analysis” that comes after a predictor has been trained. The goal of this phase is to generate useful insights, like accuracy metrics, confusion matrices, feature importance, etc. These particular examples are all included in the core analysis procedure that Lightwood executes.

-

However, the analysis procedure is structured into a sequential execution of “analysis blocks”. Each analysis block should generate a well-defined set of insights, as well as handle any actions regarding these insights at inference time.

-

As an example, one of the core blocks is the Inductive Conformal Prediction (ICP) block, which handles the confidence estimation of all Lightwood predictors. The logic within can be complex at times, but thanks to the block abstraction we can deal with it in a structured manner. As this ICP block is used when generating predictions, it implements the two main methods that the BaseAnalysisBlock class specifies: .analyze() to setup everything that is needed, and .explain() to -actually estimate the confidence in any given prediction.

-
-
-

Objective

-

In this tutorial, we will go through the steps required to implement your own analysis blocks to customize the insights of any Lightwood predictor!

-

In particular, we will implement a “model correlation heatmap” block: we want to compare the predictions of all mixers inside a BestOf ensemble object, to understand how they might differ in their overall behavior.

-
-
[1]:
-
-
-
-from typing import Dict, Tuple
-import pandas as pd
-import lightwood
-lightwood.__version__
-
-
-
-
-
[1]:
-
-
-
-
-'1.3.0'
-
-
-
-
-

Step 1: figuring out what we need

-

When designing an analysis block, an important choice needs to be made: will this block operate when calling the predictor? Or is it only going to describe its performance once on the held-out validation dataset?

-

Being in the former case means we need to implement both .analyze() and .explain() methods, while the latter case only needs an .analyze() method. Our ModelCorrelationHeatmap belongs to this second category.

-

Let’s start the implementation by inheriting from BaseAnalysisBlock:

-
-
[2]:
-
-
-
-from lightwood.analysis import BaseAnalysisBlock
-
-class ModelCorrelationHeatmap(BaseAnalysisBlock):
-    def __init__(self):
-        super().__init__()
-
-    def analyze(self, info: Dict[str, object], **kwargs) -> Dict[str, object]:
-        return info
-
-    def explain(self,
-                row_insights: pd.DataFrame,
-                global_insights: Dict[str, object], **kwargs) -> Tuple[pd.DataFrame, Dict[str, object]]:
-
-        return row_insights, global_insights
-
-
-
-
-
[3]:
-
-
-
-ModelCorrelationHeatmap()
-
-
-
-
-
[3]:
-
-
-
-
-<__main__.ModelCorrelationHeatmap at 0x7fa85c015970>
-
-
-

Right now, our newly created analysis block doesn’t do much, apart from returning the info and insights (row_insights and global_insights) exactly as it received them from the previous block.

-

As previously discussed, we only need to implement a procedure that runs post-training; no action is required at inference time. This means we can use the default .explain() behavior in the parent class:

-
-
[4]:
-
-
-
-class ModelCorrelationHeatmap(BaseAnalysisBlock):
-    def __init__(self):
-        super().__init__()
-
-    def analyze(self, info: Dict[str, object], **kwargs) -> Dict[str, object]:
-        return info
-
-
-
-
-
-

Step 2: Implementing the custom analysis block

-

Okay, now for the fun bit: we have to implement a correlation heatmap between the predictions of all mixers inside a BestOf ensemble. This is currently the only ensemble implemented in Lightwood, but it is a good idea to explicitly check that the type of the ensemble is what we expect.

-

A natural question to ask at this point is: what information do we have to implement the procedure? You’ll note that, apart from the info dictionary, we receive a kwargs dictionary. You can check out the full documentation for more details, but the keys (and respective value types) exposed in this object by default are:

-
-
[5]:
-
-
-
-kwargs = {
-        'predictor': 'lightwood.ensemble.BaseEnsemble',
-        'target': 'str',
-        'input_cols': 'list',
-        'dtype_dict': 'dict',
-        'normal_predictions': 'pd.DataFrame',
-        'data': 'pd.DataFrame',
-        'train_data': 'lightwood.data.encoded_ds.EncodedDs',
-        'encoded_val_data': 'lightwood.data.encoded_ds.EncodedDs',
-        'is_classification': 'bool',
-        'is_numerical': 'bool',
-        'is_multi_ts': 'bool',
-        'stats_info': 'lightwood.api.types.StatisticalAnalysis',
-        'ts_cfg': 'lightwood.api.types.TimeseriesSettings',
-        'accuracy_functions': 'list',
-        'has_pretrained_text_enc': 'bool'
-}
-
-
-
-

As you can see, there is a lot to work with, but for this example we will focus on using:

-
  1. The predictor ensemble
  2. The encoded_val_data to generate predictions for each mixer inside the ensemble
-

And the insight we want to produce is a matrix that compares the output of all mixers and computes the correlation between them.

-

Let’s implement the algorithm:

-
-
[6]:
-
-
-
-from typing import Dict
-from types import SimpleNamespace
-
-import numpy as np
-
-from lightwood.ensemble import BestOf
-from lightwood.analysis import BaseAnalysisBlock
-
-
-class ModelCorrelationHeatmap(BaseAnalysisBlock):
-    def __init__(self):
-        super().__init__()
-
-    def analyze(self, info: Dict[str, object], **kwargs) -> Dict[str, object]:
-        ns = SimpleNamespace(**kwargs)
-
-        # only triggered with the right type of ensemble
-        if isinstance(ns.predictor, BestOf):
-
-            # store prediction from every mixer
-            all_predictions = []
-
-            for mixer in ns.predictor.mixers:
-                predictions = mixer(ns.encoded_val_data).values  # retrieve np.ndarray from the returned pd.DataFrame
-                all_predictions.append(predictions.flatten().astype(int))  # flatten and cast labels to int
-
-            # calculate correlation matrix
-            corrs = np.corrcoef(np.array(all_predictions))
-
-            # save inside `info` object
-            info['mixer_correlation'] = corrs
-
-        return info
-
-
-
-
-

Notice the use of SimpleNamespace for dot notation accessors.

-

The procedure above is fairly straightforward, as we leverage numpy’s corrcoef() function to generate the matrix.

-

Finally, it is very important to add the output to info so that it is saved inside the actual predictor object.
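If corrcoef() is unfamiliar, here is a minimal, self-contained sketch (illustrative only, not part of the original tutorial) of what the block ends up computing: each row of the stacked array holds one mixer's flattened integer predictions, and the result is a symmetric matrix of Pearson correlations.

```python
import numpy as np

# Hypothetical predicted labels from two mixers on the same validation rows
mixer_a = np.array([1, 0, 2, 2, 1, 0])
mixer_b = np.array([1, 0, 2, 1, 1, 0])

corrs = np.corrcoef(np.array([mixer_a, mixer_b]))
print(corrs.shape)  # (2, 2); the diagonal is 1.0 by definition
print(corrs[0, 1])  # Pearson correlation between the two mixers' predictions
```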

-
-
-

Step 3: Exposing the block to Lightwood

-

To use this in an arbitrary script, we need to add the above class (and all necessary imports) to a .py file inside one of the following directories:

-
  • ~/lightwood_modules (where ~ is your home directory, e.g. /Users/username/ for macOS and /home/username/ for Linux)
  • /etc/lightwood_modules
-

Lightwood will scan these directories and import any classes it finds, so that they can be used by the JsonAI code-generating module.

-

To continue, please save the code cell above as ``model_correlation.py`` in one of the indicated directories.
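As a quick sanity check (an illustrative snippet, not part of the original tutorial; adjust the path if you used /etc/lightwood_modules instead), you can confirm the file is sitting where Lightwood will look for it:

```python
from pathlib import Path

module_path = Path.home() / 'lightwood_modules' / 'model_correlation.py'
print(module_path, module_path.exists())  # should print True once the file is saved
```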

-
-
-

Step 4: Final test run

-

Ok! Everything looks set to try out our custom block. Let’s generate a predictor for this sample dataset, and see whether our new insights are any good.

-

First, it is important to add our ModelCorrelationHeatmap to the analysis_blocks attribute of the Json AI object that will generate your predictor code.

-
-
[7]:
-
-
-
-from lightwood.api.high_level import ProblemDefinition, json_ai_from_problem
-
-# read dataset
-df = pd.read_csv('https://raw.githubusercontent.com/mindsdb/lightwood/stable/tests/data/hdi.csv')
-
-# define the predictive task
-pdef = ProblemDefinition.from_dict({
-    'target': 'Development Index',         # column you want to predict
-    'time_aim': 100,
-})
-
-# generate the Json AI intermediate representation from the data and its corresponding settings
-json_ai = json_ai_from_problem(df, problem_definition=pdef)
-
-# add the custom list of analysis blocks; in this case, composed of a single block
-json_ai.analysis_blocks = [{
-    'module': 'model_correlation.ModelCorrelationHeatmap',
-    'args': {}
-}]
-
-
-
-
-
-
-
-
-INFO:lightwood-53131:Dropping features: []
-INFO:lightwood-53131:Analyzing a sample of 222
-INFO:lightwood-53131:from a total population of 225, this is equivalent to 98.7% of your data.
-INFO:lightwood-53131:Using 15 processes to deduct types.
-INFO:lightwood-53131:Infering type for: Population
-INFO:lightwood-53131:Infering type for: Area (sq. mi.)
-INFO:lightwood-53131:Infering type for: Pop. Density
-INFO:lightwood-53131:Infering type for: GDP ($ per capita)
-INFO:lightwood-53131:Infering type for: Literacy (%)
-INFO:lightwood-53131:Infering type for: Infant mortality
-INFO:lightwood-53131:Infering type for: Development Index
-INFO:lightwood-53131:Column Area (sq. mi.) has data type integer
-INFO:lightwood-53131:Column Population has data type integer
-INFO:lightwood-53131:Column Development Index has data type categorical
-INFO:lightwood-53131:Column Literacy (%) has data type float
-INFO:lightwood-53131:Column GDP ($ per capita) has data type integer
-INFO:lightwood-53131:Column Infant mortality  has data type float
-INFO:lightwood-53131:Column Pop. Density  has data type float
-INFO:lightwood-53131:Starting statistical analysis
-INFO:lightwood-53131:Finished statistical analysis
-
-
-
-
-
-
-
-model_correlation.py
-model_correlation
-
-
-

We can take a look at the respective Json AI key just to confirm our newly added analysis block is in there:

-
-
[8]:
-
-
-
-json_ai.analysis_blocks
-
-
-
-
-
[8]:
-
-
-
-
-[{'module': 'model_correlation.ModelCorrelationHeatmap', 'args': {}}]
-
-
-

Now we are ready to create a predictor from this Json AI, and subsequently train it:

-
-
[9]:
-
-
-
-from lightwood.api.high_level import code_from_json_ai, predictor_from_code
-
-code = code_from_json_ai(json_ai)
-predictor = predictor_from_code(code)
-
-predictor.learn(df)
-
-
-
-
-
-
-
-
-INFO:lightwood-53131:Dropping features: []
-INFO:lightwood-53131:Performing statistical analysis on data
-INFO:lightwood-53131:Starting statistical analysis
-INFO:lightwood-53131:Finished statistical analysis
-INFO:lightwood-53131:Cleaning the data
-INFO:lightwood-53131:Splitting the data into train/test
-WARNING:lightwood-53131:Cannot stratify, got subsets of length: [25, 24, 23, 22, 22, 22, 22, 22, 22, 21] | Splitting without stratification
-INFO:lightwood-53131:Preparing the encoders
-INFO:lightwood-53131:Encoder prepping dict length of: 1
-INFO:lightwood-53131:Encoder prepping dict length of: 2
-INFO:lightwood-53131:Encoder prepping dict length of: 3
-INFO:lightwood-53131:Encoder prepping dict length of: 4
-INFO:lightwood-53131:Encoder prepping dict length of: 5
-INFO:lightwood-53131:Encoder prepping dict length of: 6
-INFO:lightwood-53131:Encoder prepping dict length of: 7
-
-
-
-
-
-
-
-model_correlation.py
-model_correlation
-
-
-
-
-
-
-
-INFO:lightwood-53131:Done running for: Development Index
-INFO:lightwood-53131:Done running for: Population
-INFO:lightwood-53131:Done running for: Area (sq. mi.)
-INFO:lightwood-53131:Done running for: Pop. Density
-INFO:lightwood-53131:Done running for: GDP ($ per capita)
-INFO:lightwood-53131:Done running for: Literacy (%)
-INFO:lightwood-53131:Done running for: Infant mortality
-INFO:lightwood-53131:Featurizing the data
-INFO:lightwood-53131:Training the mixers
-/home/natasha/mdb/lib/python3.8/site-packages/lightgbm/engine.py:151: UserWarning: Found `num_iterations` in params. Will use it instead of argument
-  warnings.warn("Found `{}` in params. Will use it instead of argument".format(alias))
-WARNING:lightwood-53131:LightGBM running on CPU, this somewhat slower than the GPU version, consider using a GPU instead
-INFO:lightwood-53131:Loss of 2.1644320487976074 with learning rate 0.0001
-INFO:lightwood-53131:Loss of 2.4373621940612793 with learning rate 0.00014
-INFO:lightwood-53131:Found learning rate of: 0.0001
-/home/natasha/mdb/lib/python3.8/site-packages/pytorch_ranger/ranger.py:172: UserWarning: This overload of addcmul_ is deprecated:
-        addcmul_(Number value, Tensor tensor1, Tensor tensor2)
-Consider using one of the following signatures instead:
-        addcmul_(Tensor tensor1, Tensor tensor2, *, Number value) (Triggered internally at  /pytorch/torch/csrc/utils/python_arg_parser.cpp:1005.)
-  exp_avg_sq.mul_(beta2).addcmul_(1 - beta2, grad, grad)
-DEBUG:lightwood-53131:Loss @ epoch 1: 1.6043835878372192
-DEBUG:lightwood-53131:Loss @ epoch 2: 1.614564061164856
-DEBUG:lightwood-53131:Loss @ epoch 3: 1.6116881370544434
-DEBUG:lightwood-53131:Loss @ epoch 4: 1.6085857152938843
-DEBUG:lightwood-53131:Loss @ epoch 5: 1.5999916791915894
-DEBUG:lightwood-53131:Loss @ epoch 6: 1.5959053039550781
-DEBUG:lightwood-53131:Loss @ epoch 7: 1.5914497375488281
-DEBUG:lightwood-53131:Loss @ epoch 8: 1.586897850036621
-DEBUG:lightwood-53131:Loss @ epoch 9: 1.582642912864685
-DEBUG:lightwood-53131:Loss @ epoch 10: 1.5786747932434082
-DEBUG:lightwood-53131:Loss @ epoch 11: 1.5690934658050537
-DEBUG:lightwood-53131:Loss @ epoch 12: 1.5649737119674683
-DEBUG:lightwood-53131:Loss @ epoch 13: 1.5617222785949707
-DEBUG:lightwood-53131:Loss @ epoch 14: 1.5580050945281982
-DEBUG:lightwood-53131:Loss @ epoch 15: 1.55539071559906
-DEBUG:lightwood-53131:Loss @ epoch 16: 1.5526844263076782
-DEBUG:lightwood-53131:Loss @ epoch 17: 1.5471524000167847
-DEBUG:lightwood-53131:Loss @ epoch 18: 1.5454663038253784
-DEBUG:lightwood-53131:Loss @ epoch 19: 1.5436923503875732
-DEBUG:lightwood-53131:Loss @ epoch 20: 1.5420359373092651
-DEBUG:lightwood-53131:Loss @ epoch 21: 1.5407888889312744
-DEBUG:lightwood-53131:Loss @ epoch 22: 1.5401763916015625
-DEBUG:lightwood-53131:Loss @ epoch 23: 1.5390430688858032
-DEBUG:lightwood-53131:Loss @ epoch 24: 1.53862726688385
-DEBUG:lightwood-53131:Loss @ epoch 25: 1.5379230976104736
-DEBUG:lightwood-53131:Loss @ epoch 26: 1.5374646186828613
-DEBUG:lightwood-53131:Loss @ epoch 27: 1.5376394987106323
-DEBUG:lightwood-53131:Loss @ epoch 28: 1.5372562408447266
-DEBUG:lightwood-53131:Loss @ epoch 29: 1.537568211555481
-DEBUG:lightwood-53131:Loss @ epoch 1: 1.5716121435165404
-DEBUG:lightwood-53131:Loss @ epoch 2: 1.5647767543792725
-DEBUG:lightwood-53131:Loss @ epoch 3: 1.5728715658187866
-DEBUG:lightwood-53131:Loss @ epoch 4: 1.5768787622451783
-DEBUG:lightwood-53131:Loss @ epoch 5: 1.5729807138442993
-DEBUG:lightwood-53131:Loss @ epoch 6: 1.56294903755188
-DEBUG:lightwood-53131:Loss @ epoch 7: 1.5892131805419922
-INFO:lightwood-53131:Started fitting LGBM model
-/home/natasha/mdb/lib/python3.8/site-packages/lightgbm/engine.py:151: UserWarning: Found `num_iterations` in params. Will use it instead of argument
-  warnings.warn("Found `{}` in params. Will use it instead of argument".format(alias))
-INFO:lightwood-53131:A single GBM iteration takes 0.1 seconds
-INFO:lightwood-53131:Training GBM (<module 'lightgbm' from '/home/natasha/mdb/lib/python3.8/site-packages/lightgbm/__init__.py'>) with 176 iterations given 22 seconds constraint
-/home/natasha/mdb/lib/python3.8/site-packages/lightgbm/engine.py:156: UserWarning: Found `early_stopping_rounds` in params. Will use it instead of argument
-  warnings.warn("Found `{}` in params. Will use it instead of argument".format(alias))
-INFO:lightwood-53131:Lightgbm model contains 880 weak estimators
-INFO:lightwood-53131:Updating lightgbm model with 10.5 iterations
-/home/natasha/mdb/lib/python3.8/site-packages/lightgbm/engine.py:151: UserWarning: Found `num_iterations` in params. Will use it instead of argument
-  warnings.warn("Found `{}` in params. Will use it instead of argument".format(alias))
-/home/natasha/mdb/lib/python3.8/site-packages/lightgbm/engine.py:156: UserWarning: Found `early_stopping_rounds` in params. Will use it instead of argument
-  warnings.warn("Found `{}` in params. Will use it instead of argument".format(alias))
-INFO:lightwood-53131:Model now has a total of 880 weak estimators
-WARNING:lightwood-53131:Exception: Unspported categorical type for regression when training mixer: <lightwood.mixer.regression.Regression object at 0x7fa84c42f640>
-INFO:lightwood-53131:Ensembling the mixer
-INFO:lightwood-53131:Mixer: Neural got accuracy: 0.2916666666666667
-INFO:lightwood-53131:Mixer: LightGBM got accuracy: 1.0
-INFO:lightwood-53131:Picked best mixer: LightGBM
-INFO:lightwood-53131:Analyzing the ensemble of mixers
-INFO:lightwood-53131:Adjustment on validation requested.
-INFO:lightwood-53131:Updating the mixers
-DEBUG:lightwood-53131:Loss @ epoch 1: 1.532525897026062
-DEBUG:lightwood-53131:Loss @ epoch 2: 1.6230510274569194
-DEBUG:lightwood-53131:Loss @ epoch 3: 1.529026726881663
-DEBUG:lightwood-53131:Loss @ epoch 4: 1.4609563549359639
-DEBUG:lightwood-53131:Loss @ epoch 5: 1.6120732029279072
-INFO:lightwood-53131:Updating lightgbm model with 10.5 iterations
-/home/natasha/mdb/lib/python3.8/site-packages/lightgbm/engine.py:151: UserWarning: Found `num_iterations` in params. Will use it instead of argument
-  warnings.warn("Found `{}` in params. Will use it instead of argument".format(alias))
-/home/natasha/mdb/lib/python3.8/site-packages/lightgbm/engine.py:156: UserWarning: Found `early_stopping_rounds` in params. Will use it instead of argument
-  warnings.warn("Found `{}` in params. Will use it instead of argument".format(alias))
-INFO:lightwood-53131:Model now has a total of 880 weak estimators
-
-
-

Finally, we can visualize the mixer correlation matrix:

-
-
[10]:
-
-
-
-import matplotlib.pyplot as plt
-
-mc = predictor.runtime_analyzer['mixer_correlation']  # newly produced insight
-
-mixer_names = [c.__class__.__name__ for c in predictor.ensemble.mixers]
-
-# plotting code
-fig, ax = plt.subplots()
-im = ax.imshow(mc, cmap='seismic')
-
-# set ticks
-ax.set_xticks(np.arange(mc.shape[0]))
-ax.set_yticks(np.arange(mc.shape[1]))
-
-# set tick labels
-ax.set_xticklabels(mixer_names)
-ax.set_yticklabels(mixer_names)
-
-# show cell values
-for i in range(len(mixer_names)):
-    for j in range(len(mixer_names)):
-        text = ax.text(j, i, round(mc[i, j], 3), ha="center", va="center", color="w")
-
-
-
-
-
-
-
-
-../../_images/tutorials_custom_explainer_custom_explainer_20_0.png -
-
-

Nice! We’ve just added an additional piece of insight regarding the predictor that Lightwood came up with for the task of predicting the Human Development Index of any given country.

-

What this matrix is telling us is whether the predictions of both mixers stored in the ensemble – Neural and LightGBM – have a high correlation or not.
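As a concrete way to read the plot, the off-diagonal entry can be printed directly; this is just an illustrative line on top of the mc and mixer_names objects defined in the plotting cell above:

```python
# mc and mixer_names come from the previous cell
print(f"{mixer_names[0]} vs {mixer_names[1]} correlation: {mc[0, 1]:.3f}")
```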

-

This is, of course, a very simple example, but it shows the convenience of such an abstraction within the broader pipeline that Lightwood automates.

-

For more complex examples, you can check out any of the three core analysis blocks that we use:

-
  • lightwood.analysis.nc.calibrate.ICP
  • lightwood.analysis.helpers.acc_stats.AccStats
  • lightwood.analysis.helpers.feature_importance.GlobalFeatureImportance
-
-
[ ]:
-
-
-
-
-
-
-
-
-
- - - - - - - - - - - \ No newline at end of file diff --git a/docs/tutorials/custom_explainer/custom_explainer.ipynb b/docs/tutorials/custom_explainer/custom_explainer.ipynb deleted file mode 100644 index 4f76349a5..000000000 --- a/docs/tutorials/custom_explainer/custom_explainer.ipynb +++ /dev/null @@ -1,592 +0,0 @@ -{ - "cells": [ - { - "cell_type": "markdown", - "metadata": {}, - "source": [ - "# Tutorial - Implementing a custom analysis block in Lightwood\n", - "\n", - "\n", - "## Introduction\n", - "\n", - "As you might already know, Lightwood is designed to be a flexible machine learning (ML) library that is able to abstract and automate the entire ML pipeline. Crucially, it is also designed to be extended or modified very easily according to your needs, essentially offering the entire spectrum between fully automated AutoML and a lightweight wrapper for customized ML pipelines.\n", - "\n", - "As such, we can identify several different customizable \"phases\" in the process. The relevant phase for this tutorial is the \"analysis\" that comes after a predictor has been trained. The goal of this phase is to generate useful insights, like accuracy metrics, confusion matrices, feature importance, etc. These particular examples are all included in the core analysis procedure that Lightwood executes.\n", - "\n", - "However, the analysis procedure is structured into a sequential execution of \"analysis blocks\". Each analysis block should generate a well-defined set of insights, as well as handling any actions regarding these at inference time.\n", - "\n", - "As an example, one of the core blocks is the Inductive Conformal Prediction (`ICP`) block, which handles the confidence estimation of all Lightwood predictors. The logic within can be complex at times, but thanks to the block abstraction we can deal with it in a structured manner. As this `ICP` block is used when generating predictions, it implements the two main methods that the `BaseAnalysisBlock` class specifies: `.analyze()` to setup everything that is needed, and `.explain()` to actually estimate the confidence in any given prediction.\n", - "\n", - "\n", - "## Objective\n", - "\n", - "In this tutorial, we will go through the steps required to implement your own analysis blocks to customize the insights of any Lightwood predictor!\n", - "\n", - "In particular, we will implement a \"model correlation heatmap\" block: we want to compare the predictions of all mixers inside a `BestOf` ensemble object, to understand how they might differ in their overall behavior." - ] - }, - { - "cell_type": "code", - "execution_count": 1, - "metadata": {}, - "outputs": [ - { - "data": { - "text/plain": [ - "'1.3.0'" - ] - }, - "execution_count": 1, - "metadata": {}, - "output_type": "execute_result" - } - ], - "source": [ - "from typing import Dict, Tuple\n", - "import pandas as pd\n", - "import lightwood\n", - "lightwood.__version__" - ] - }, - { - "cell_type": "markdown", - "metadata": {}, - "source": [ - "## Step 1: figuring out what we need\n", - "\n", - "When designing an analysis block, an important choice needs to be made: will this block operate when calling the predictor? Or is it only going to describe its performance once in the held-out validation dataset?\n", - "\n", - "Being in the former case means we need to implement both `.analyze()` and `.explain()` methods, while the latter case only needs an `.analyze()` method. 
Our `ModelCorrelationHeatmap` belongs to this second category.\n", - "\n", - "Let's start the implementation by inheriting from `BaseAnalysisBlock`:" - ] - }, - { - "cell_type": "code", - "execution_count": 2, - "metadata": {}, - "outputs": [], - "source": [ - "from lightwood.analysis import BaseAnalysisBlock\n", - "\n", - "class ModelCorrelationHeatmap(BaseAnalysisBlock):\n", - " def __init__(self):\n", - " super().__init__()\n", - " \n", - " def analyze(self, info: Dict[str, object], **kwargs) -> Dict[str, object]:\n", - " return info\n", - "\n", - " def explain(self,\n", - " row_insights: pd.DataFrame,\n", - " global_insights: Dict[str, object], **kwargs) -> Tuple[pd.DataFrame, Dict[str, object]]:\n", - " \n", - " return row_insights, global_insights" - ] - }, - { - "cell_type": "code", - "execution_count": 3, - "metadata": {}, - "outputs": [ - { - "data": { - "text/plain": [ - "<__main__.ModelCorrelationHeatmap at 0x7fa85c015970>" - ] - }, - "execution_count": 3, - "metadata": {}, - "output_type": "execute_result" - } - ], - "source": [ - "ModelCorrelationHeatmap()" - ] - }, - { - "cell_type": "markdown", - "metadata": {}, - "source": [ - "Right now, our newly created analysis block doesn't do much, apart from returning the `info` and insights (`row_insights` and `global_insights`) exactly as it received them from the previous block.\n", - "\n", - "As previously discussed, we only need to implement a procedure that runs post-training, no action is required at inference time. This means we can use the default `.explain()` behavior in the parent class:" - ] - }, - { - "cell_type": "code", - "execution_count": 4, - "metadata": {}, - "outputs": [], - "source": [ - "class ModelCorrelationHeatmap(BaseAnalysisBlock):\n", - " def __init__(self):\n", - " super().__init__()\n", - " \n", - " def analyze(self, info: Dict[str, object], **kwargs) -> Dict[str, object]:\n", - " return info" - ] - }, - { - "cell_type": "markdown", - "metadata": {}, - "source": [ - "## Step 2: Implementing the custom analysis block\n", - "\n", - "Okay, now for the fun bit: we have to implement a correlation heatmap between the predictions of all mixers inside a `BestOf` ensemble. This is currently the only ensemble implemented in Lightwood, but it is a good idea to explicitly check that the type of the ensemble is what we expect.\n", - "\n", - "A natural question to ask at this point is: what information do we have to implement the procedure? You'll note that, apart from the `info` dictionary, we receive a `kwargs` dictionary. 
You can check out the full documentation for more details, but the keys (and respective value types) exposed in this object by default are:" - ] - }, - { - "cell_type": "code", - "execution_count": 5, - "metadata": {}, - "outputs": [], - "source": [ - "kwargs = {\n", - " 'predictor': 'lightwood.ensemble.BaseEnsemble',\n", - " 'target': 'str',\n", - " 'input_cols': 'list',\n", - " 'dtype_dict': 'dict',\n", - " 'normal_predictions': 'pd.DataFrame',\n", - " 'data': 'pd.DataFrame',\n", - " 'train_data': 'lightwood.data.encoded_ds.EncodedDs',\n", - " 'encoded_val_data': 'lightwood.data.encoded_ds.EncodedDs',\n", - " 'is_classification': 'bool',\n", - " 'is_numerical': 'bool',\n", - " 'is_multi_ts': 'bool',\n", - " 'stats_info': 'lightwood.api.types.StatisticalAnalysis',\n", - " 'ts_cfg': 'lightwood.api.types.TimeseriesSettings',\n", - " 'accuracy_functions': 'list',\n", - " 'has_pretrained_text_enc': 'bool'\n", - "}" - ] - }, - { - "cell_type": "markdown", - "metadata": {}, - "source": [ - "As you can see there is lots to work with, but for this example we will focus on using:\n", - "\n", - "1. The `predictor` ensemble\n", - "2. The `encoded_val_data` to generate predictions for each mixer inside the ensemble\n", - "\n", - "And the insight we're want to produce is a matrix that compares the output of all mixers and computes the correlation between them.\n", - "\n", - "Let's implement the algorithm:" - ] - }, - { - "cell_type": "code", - "execution_count": 6, - "metadata": {}, - "outputs": [], - "source": [ - "from typing import Dict\n", - "from types import SimpleNamespace\n", - "\n", - "import numpy as np\n", - "\n", - "from lightwood.ensemble import BestOf\n", - "from lightwood.analysis import BaseAnalysisBlock\n", - "\n", - "\n", - "class ModelCorrelationHeatmap(BaseAnalysisBlock):\n", - " def __init__(self):\n", - " super().__init__()\n", - " \n", - " def analyze(self, info: Dict[str, object], **kwargs) -> Dict[str, object]:\n", - " ns = SimpleNamespace(**kwargs)\n", - " \n", - " # only triggered with the right type of ensemble\n", - " if isinstance(ns.predictor, BestOf):\n", - " \n", - " # store prediction from every mixer\n", - " all_predictions = []\n", - "\n", - " for mixer in ns.predictor.mixers:\n", - " predictions = mixer(ns.encoded_val_data).values # retrieve np.ndarray from the returned pd.DataFrame\n", - " all_predictions.append(predictions.flatten().astype(int)) # flatten and cast labels to int\n", - " \n", - " # calculate correlation matrix\n", - " corrs = np.corrcoef(np.array(all_predictions))\n", - " \n", - " # save inside `info` object\n", - " info['mixer_correlation'] = corrs\n", - " \n", - " return info\n" - ] - }, - { - "cell_type": "markdown", - "metadata": {}, - "source": [ - "Notice the use of `SimpleNamespace` for dot notation accessors.\n", - "\n", - "The procedure above is fairly straightforward, as we leverage numpy's `corrcoef()` function to generate the matrix. \n", - "\n", - "Finally, it is very important to add the output to `info` so that it is saved inside the actual predictor object. " - ] - }, - { - "cell_type": "markdown", - "metadata": {}, - "source": [ - "## Step 3: Exposing the block to Lightwood\n", - "\n", - "\n", - "To use this in an arbitrary script, we need to add the above class (and all necessary imports) to a `.py` file inside one of the following directories:\n", - "\n", - "* `~/lightwood_modules` (where `~` is your home directory, e.g. 
`/Users/username/` for macOS and `/home/username/` for linux\n", - "* `/etc/lightwood_modules`\n", - "\n", - "Lightwood will scan these directories and import any class so that they can be found and used by the `JsonAI` code generating module.\n", - "\n", - "**To continue, please save the code cell above as `model_correlation.py` in one of the indicated directories.**" - ] - }, - { - "cell_type": "markdown", - "metadata": {}, - "source": [ - "## Step 4: Final test run\n", - "\n", - "Ok! Everything looks set to try out our custom block. Let's generate a predictor for [this](https://github.com/mindsdb/lightwood/blob/stable/tests/data/hdi.csv) sample dataset, and see whether our new insights are any good.\n", - "\n", - "First, it is important to add our `ModelCorrelationHeatmap` to the `analysis_blocks` attribute of the Json AI object that will generate your predictor code. " - ] - }, - { - "cell_type": "code", - "execution_count": 7, - "metadata": {}, - "outputs": [ - { - "name": "stderr", - "output_type": "stream", - "text": [ - "INFO:lightwood-53131:Dropping features: []\n", - "INFO:lightwood-53131:Analyzing a sample of 222\n", - "INFO:lightwood-53131:from a total population of 225, this is equivalent to 98.7% of your data.\n", - "INFO:lightwood-53131:Using 15 processes to deduct types.\n", - "INFO:lightwood-53131:Infering type for: Population\n", - "INFO:lightwood-53131:Infering type for: Area (sq. mi.)\n", - "INFO:lightwood-53131:Infering type for: Pop. Density \n", - "INFO:lightwood-53131:Infering type for: GDP ($ per capita)\n", - "INFO:lightwood-53131:Infering type for: Literacy (%)\n", - "INFO:lightwood-53131:Infering type for: Infant mortality \n", - "INFO:lightwood-53131:Infering type for: Development Index\n", - "INFO:lightwood-53131:Column Area (sq. mi.) has data type integer\n", - "INFO:lightwood-53131:Column Population has data type integer\n", - "INFO:lightwood-53131:Column Development Index has data type categorical\n", - "INFO:lightwood-53131:Column Literacy (%) has data type float\n", - "INFO:lightwood-53131:Column GDP ($ per capita) has data type integer\n", - "INFO:lightwood-53131:Column Infant mortality has data type float\n", - "INFO:lightwood-53131:Column Pop. 
Density has data type float\n", - "INFO:lightwood-53131:Starting statistical analysis\n", - "INFO:lightwood-53131:Finished statistical analysis\n" - ] - }, - { - "name": "stdout", - "output_type": "stream", - "text": [ - "model_correlation.py\n", - "model_correlation\n" - ] - } - ], - "source": [ - "from lightwood.api.high_level import ProblemDefinition, json_ai_from_problem\n", - "\n", - "# read dataset\n", - "df = pd.read_csv('https://raw.githubusercontent.com/mindsdb/lightwood/stable/tests/data/hdi.csv')\n", - "\n", - "# define the predictive task\n", - "pdef = ProblemDefinition.from_dict({\n", - " 'target': 'Development Index', # column you want to predict\n", - " 'time_aim': 100,\n", - "})\n", - "\n", - "# generate the Json AI intermediate representation from the data and its corresponding settings\n", - "json_ai = json_ai_from_problem(df, problem_definition=pdef)\n", - "\n", - "# add the custom list of analysis blocks; in this case, composed of a single block\n", - "json_ai.analysis_blocks = [{\n", - " 'module': 'model_correlation.ModelCorrelationHeatmap',\n", - " 'args': {}\n", - "}]" - ] - }, - { - "cell_type": "markdown", - "metadata": {}, - "source": [ - "We can take a look at the respective Json AI key just to confirm our newly added analysis block is in there:" - ] - }, - { - "cell_type": "code", - "execution_count": 8, - "metadata": {}, - "outputs": [ - { - "data": { - "text/plain": [ - "[{'module': 'model_correlation.ModelCorrelationHeatmap', 'args': {}}]" - ] - }, - "execution_count": 8, - "metadata": {}, - "output_type": "execute_result" - } - ], - "source": [ - "json_ai.analysis_blocks" - ] - }, - { - "cell_type": "markdown", - "metadata": {}, - "source": [ - "Now we are ready to create a predictor from this Json AI, and subsequently train it:" - ] - }, - { - "cell_type": "code", - "execution_count": 9, - "metadata": { - "scrolled": false - }, - "outputs": [ - { - "name": "stderr", - "output_type": "stream", - "text": [ - "INFO:lightwood-53131:Dropping features: []\n", - "INFO:lightwood-53131:Performing statistical analysis on data\n", - "INFO:lightwood-53131:Starting statistical analysis\n", - "INFO:lightwood-53131:Finished statistical analysis\n", - "INFO:lightwood-53131:Cleaning the data\n", - "INFO:lightwood-53131:Splitting the data into train/test\n", - "WARNING:lightwood-53131:Cannot stratify, got subsets of length: [25, 24, 23, 22, 22, 22, 22, 22, 22, 21] | Splitting without stratification\n", - "INFO:lightwood-53131:Preparing the encoders\n", - "INFO:lightwood-53131:Encoder prepping dict length of: 1\n", - "INFO:lightwood-53131:Encoder prepping dict length of: 2\n", - "INFO:lightwood-53131:Encoder prepping dict length of: 3\n", - "INFO:lightwood-53131:Encoder prepping dict length of: 4\n", - "INFO:lightwood-53131:Encoder prepping dict length of: 5\n", - "INFO:lightwood-53131:Encoder prepping dict length of: 6\n", - "INFO:lightwood-53131:Encoder prepping dict length of: 7\n" - ] - }, - { - "name": "stdout", - "output_type": "stream", - "text": [ - "model_correlation.py\n", - "model_correlation\n" - ] - }, - { - "name": "stderr", - "output_type": "stream", - "text": [ - "INFO:lightwood-53131:Done running for: Development Index\n", - "INFO:lightwood-53131:Done running for: Population\n", - "INFO:lightwood-53131:Done running for: Area (sq. mi.)\n", - "INFO:lightwood-53131:Done running for: Pop. 
Density \n", - "INFO:lightwood-53131:Done running for: GDP ($ per capita)\n", - "INFO:lightwood-53131:Done running for: Literacy (%)\n", - "INFO:lightwood-53131:Done running for: Infant mortality \n", - "INFO:lightwood-53131:Featurizing the data\n", - "INFO:lightwood-53131:Training the mixers\n", - "/home/natasha/mdb/lib/python3.8/site-packages/lightgbm/engine.py:151: UserWarning: Found `num_iterations` in params. Will use it instead of argument\n", - " warnings.warn(\"Found `{}` in params. Will use it instead of argument\".format(alias))\n", - "WARNING:lightwood-53131:LightGBM running on CPU, this somewhat slower than the GPU version, consider using a GPU instead\n", - "INFO:lightwood-53131:Loss of 2.1644320487976074 with learning rate 0.0001\n", - "INFO:lightwood-53131:Loss of 2.4373621940612793 with learning rate 0.00014\n", - "INFO:lightwood-53131:Found learning rate of: 0.0001\n", - "/home/natasha/mdb/lib/python3.8/site-packages/pytorch_ranger/ranger.py:172: UserWarning: This overload of addcmul_ is deprecated:\n", - "\taddcmul_(Number value, Tensor tensor1, Tensor tensor2)\n", - "Consider using one of the following signatures instead:\n", - "\taddcmul_(Tensor tensor1, Tensor tensor2, *, Number value) (Triggered internally at /pytorch/torch/csrc/utils/python_arg_parser.cpp:1005.)\n", - " exp_avg_sq.mul_(beta2).addcmul_(1 - beta2, grad, grad)\n", - "DEBUG:lightwood-53131:Loss @ epoch 1: 1.6043835878372192\n", - "DEBUG:lightwood-53131:Loss @ epoch 2: 1.614564061164856\n", - "DEBUG:lightwood-53131:Loss @ epoch 3: 1.6116881370544434\n", - "DEBUG:lightwood-53131:Loss @ epoch 4: 1.6085857152938843\n", - "DEBUG:lightwood-53131:Loss @ epoch 5: 1.5999916791915894\n", - "DEBUG:lightwood-53131:Loss @ epoch 6: 1.5959053039550781\n", - "DEBUG:lightwood-53131:Loss @ epoch 7: 1.5914497375488281\n", - "DEBUG:lightwood-53131:Loss @ epoch 8: 1.586897850036621\n", - "DEBUG:lightwood-53131:Loss @ epoch 9: 1.582642912864685\n", - "DEBUG:lightwood-53131:Loss @ epoch 10: 1.5786747932434082\n", - "DEBUG:lightwood-53131:Loss @ epoch 11: 1.5690934658050537\n", - "DEBUG:lightwood-53131:Loss @ epoch 12: 1.5649737119674683\n", - "DEBUG:lightwood-53131:Loss @ epoch 13: 1.5617222785949707\n", - "DEBUG:lightwood-53131:Loss @ epoch 14: 1.5580050945281982\n", - "DEBUG:lightwood-53131:Loss @ epoch 15: 1.55539071559906\n", - "DEBUG:lightwood-53131:Loss @ epoch 16: 1.5526844263076782\n", - "DEBUG:lightwood-53131:Loss @ epoch 17: 1.5471524000167847\n", - "DEBUG:lightwood-53131:Loss @ epoch 18: 1.5454663038253784\n", - "DEBUG:lightwood-53131:Loss @ epoch 19: 1.5436923503875732\n", - "DEBUG:lightwood-53131:Loss @ epoch 20: 1.5420359373092651\n", - "DEBUG:lightwood-53131:Loss @ epoch 21: 1.5407888889312744\n", - "DEBUG:lightwood-53131:Loss @ epoch 22: 1.5401763916015625\n", - "DEBUG:lightwood-53131:Loss @ epoch 23: 1.5390430688858032\n", - "DEBUG:lightwood-53131:Loss @ epoch 24: 1.53862726688385\n", - "DEBUG:lightwood-53131:Loss @ epoch 25: 1.5379230976104736\n", - "DEBUG:lightwood-53131:Loss @ epoch 26: 1.5374646186828613\n", - "DEBUG:lightwood-53131:Loss @ epoch 27: 1.5376394987106323\n", - "DEBUG:lightwood-53131:Loss @ epoch 28: 1.5372562408447266\n", - "DEBUG:lightwood-53131:Loss @ epoch 29: 1.537568211555481\n", - "DEBUG:lightwood-53131:Loss @ epoch 1: 1.5716121435165404\n", - "DEBUG:lightwood-53131:Loss @ epoch 2: 1.5647767543792725\n", - "DEBUG:lightwood-53131:Loss @ epoch 3: 1.5728715658187866\n", - "DEBUG:lightwood-53131:Loss @ epoch 4: 1.5768787622451783\n", - "DEBUG:lightwood-53131:Loss @ epoch 5: 
1.5729807138442993\n", - "DEBUG:lightwood-53131:Loss @ epoch 6: 1.56294903755188\n", - "DEBUG:lightwood-53131:Loss @ epoch 7: 1.5892131805419922\n", - "INFO:lightwood-53131:Started fitting LGBM model\n", - "/home/natasha/mdb/lib/python3.8/site-packages/lightgbm/engine.py:151: UserWarning: Found `num_iterations` in params. Will use it instead of argument\n", - " warnings.warn(\"Found `{}` in params. Will use it instead of argument\".format(alias))\n", - "INFO:lightwood-53131:A single GBM iteration takes 0.1 seconds\n", - "INFO:lightwood-53131:Training GBM () with 176 iterations given 22 seconds constraint\n", - "/home/natasha/mdb/lib/python3.8/site-packages/lightgbm/engine.py:156: UserWarning: Found `early_stopping_rounds` in params. Will use it instead of argument\n", - " warnings.warn(\"Found `{}` in params. Will use it instead of argument\".format(alias))\n", - "INFO:lightwood-53131:Lightgbm model contains 880 weak estimators\n", - "INFO:lightwood-53131:Updating lightgbm model with 10.5 iterations\n", - "/home/natasha/mdb/lib/python3.8/site-packages/lightgbm/engine.py:151: UserWarning: Found `num_iterations` in params. Will use it instead of argument\n", - " warnings.warn(\"Found `{}` in params. Will use it instead of argument\".format(alias))\n", - "/home/natasha/mdb/lib/python3.8/site-packages/lightgbm/engine.py:156: UserWarning: Found `early_stopping_rounds` in params. Will use it instead of argument\n", - " warnings.warn(\"Found `{}` in params. Will use it instead of argument\".format(alias))\n", - "INFO:lightwood-53131:Model now has a total of 880 weak estimators\n", - "WARNING:lightwood-53131:Exception: Unspported categorical type for regression when training mixer: \n", - "INFO:lightwood-53131:Ensembling the mixer\n", - "INFO:lightwood-53131:Mixer: Neural got accuracy: 0.2916666666666667\n", - "INFO:lightwood-53131:Mixer: LightGBM got accuracy: 1.0\n", - "INFO:lightwood-53131:Picked best mixer: LightGBM\n", - "INFO:lightwood-53131:Analyzing the ensemble of mixers\n", - "INFO:lightwood-53131:Adjustment on validation requested.\n", - "INFO:lightwood-53131:Updating the mixers\n", - "DEBUG:lightwood-53131:Loss @ epoch 1: 1.532525897026062\n", - "DEBUG:lightwood-53131:Loss @ epoch 2: 1.6230510274569194\n", - "DEBUG:lightwood-53131:Loss @ epoch 3: 1.529026726881663\n", - "DEBUG:lightwood-53131:Loss @ epoch 4: 1.4609563549359639\n", - "DEBUG:lightwood-53131:Loss @ epoch 5: 1.6120732029279072\n", - "INFO:lightwood-53131:Updating lightgbm model with 10.5 iterations\n", - "/home/natasha/mdb/lib/python3.8/site-packages/lightgbm/engine.py:151: UserWarning: Found `num_iterations` in params. Will use it instead of argument\n", - " warnings.warn(\"Found `{}` in params. Will use it instead of argument\".format(alias))\n", - "/home/natasha/mdb/lib/python3.8/site-packages/lightgbm/engine.py:156: UserWarning: Found `early_stopping_rounds` in params. Will use it instead of argument\n", - " warnings.warn(\"Found `{}` in params. 
Will use it instead of argument\".format(alias))\n", - "INFO:lightwood-53131:Model now has a total of 880 weak estimators\n" - ] - } - ], - "source": [ - "from lightwood.api.high_level import code_from_json_ai, predictor_from_code\n", - "\n", - "code = code_from_json_ai(json_ai)\n", - "predictor = predictor_from_code(code)\n", - "\n", - "predictor.learn(df)" - ] - }, - { - "cell_type": "markdown", - "metadata": {}, - "source": [ - "Finally, we can visualize the mixer correlation matrix:" - ] - }, - { - "cell_type": "code", - "execution_count": 10, - "metadata": {}, - "outputs": [ - { - "data": { - "image/png": "iVBORw0KGgoAAAANSUhEUgAAAR8AAAD4CAYAAADVYeLDAAAAOXRFWHRTb2Z0d2FyZQBNYXRwbG90bGliIHZlcnNpb24zLjQuMiwgaHR0cHM6Ly9tYXRwbG90bGliLm9yZy8rg+JYAAAACXBIWXMAAAsTAAALEwEAmpwYAAAR9ElEQVR4nO3bfXBV9Z3H8feHB0coRIqgYMgCasbRUgSfYH1aRx0V3QrVjsVhxmJVKmJx2qHVtlrBdjs+tCpailq0DFMpOutMDSxbHWqprIoCliiiGFpKIVanaCUiggjf/SOX9BIJD5KbbyCf10xm7jnnd3/3d0jyzrknQRGBmVlLa5e9ADNrmxwfM0vh+JhZCsfHzFI4PmaWokP2ArJ0lqJb9iJsr/ydQ7KXYHtt/bqI6LmzI202Pt2AMdmLsL0yiTOyl2B7bc7qpo74bZeZpXB8zCyF42NmKRwfM0vh+JhZCsfHzFI4PmaWwvExsxSOj5mlcHzMLIXjY2YpHB8zS+H4mFkKx8fMUjg+ZpbC8TGzFI6PmaVwfMwsheNjZikcHzNL4fiYWQrHx8xSOD5mlsLxMbMUjo+ZpXB8zCyF42NmKRwfM0vh+JhZCsfHzFI4PmaWwvExsxSOj5mlcHzMLIXjY2YpHB8zS+H4mFkKx8fMUjg+ZpbC8TGzFI6PmaVwfMwsheNjZikcHzNL4fiYWQrHx8xSOD5mlsLxMbMUjo+ZpXB89lPDH36Y77zzDte9+mqTY4ZNnsz4mhrGVlfTe/DgFlxd23b++SfwxhtTqal5kBtv/EqT4y655FQiZnPiiUcD0L17V5555r/44IPHuf/+b+ww9rLLTqe6+j6WLZvC7bd/raTrbykljY+kkPSzou0JkiaW8jULrzNf0kmlfp1MS6dP59cXXNDk8cphw+heWcl9lZXMHjOGi6ZObcHVtV3t2rVjypRrGTZsIscdN47LLz+TY4+t+NS4Ll06ccMNX2Lhwjca9m3a9DG33PIoEyY8ssPY7t27ctddX+ecc25mwIBx9Or1ec4+e2DJz6XUSn3lsxm4RFKP5pxU9dr0VdvqBQv46L33mjx+zPDhVM+YAcDaF1/k4G7d6NKrV0str8065ZRKVq78O6tWvcOWLZ8wa9azDB8+5FPjfvSjUdxxxxNs2rSlYd/GjZt57rnlO+wDOPLIXtTUvMW6dXUAzJtXzaWXnlbaE2kBpf4G/gR4CPhW4wOSekp6QtKiwsdphf0TJU0oGrdMUr/CxwpJM4BlQIWkqZIWS3pN0qQSn8t+pay8nLo1axq269aupay8PHFFbUN5+aGsWbOuYXvt2ncpLz90hzGDBx9FRUVP5s5dvEdzrlz5FsccU07fvofRvn07RowYSkVFs/48T9GhBV5jCvCKpDsb7Z8M3BMR/yfp34CngGN3M1cl8LWIWAgg6QcR8Z6k9sDvJQ2MiFeaerKkMcAYgEM+48mY7QtJ3H33VYwefe8eP+f99z9k7Nhf8Nhj32XbtuD551/nqKN6l26RLaTk8YmIusLVynjgo6JD5wLHSdq+XSapy26mW709PAWXFYLSAegNHAc0GZ+IeIj6KzGOkGKvTmQ/U1dbS1nFv+41lPXpQ11tbeKK2oba2nd3uCrp0+dQamvfbdju2rUTAwb0Zf78nwDQq9fnqaq6mYsv/jFLlqxsct45cxYxZ84iAK655ny2bt1WojNoOS113+Re4Crgc41ee2hEDCp8lEfEBurfqhWv6+Cixx9ufyCpPzABOCciBgL/02hsm7aiqorjr7gCgD5DhrB5/Xo2vP128qoOfIsW1VBZeQT9+h1Ox44dGDnyTKqqXmo4Xle3kZ49R9G//9X07381Cxeu2G14AHr2rL9W79btc1x33YVMm/Z0Sc+jJbTE2y4Kb40epz5A22/lPw18E7gLQNKgiFgK/BX4z8K+E4D+TUxbRn2M1ks6HBgGzC/NGbQ+l86cSb+zzqJzjx58e80a/nDrrbTv2BGAxQ8+SM3cuVReeCHjV65ky8aNPHnllckrbhu2bt3G9dc/wFNPTaJ9+3Y88sg8li//G5MmjWLx4hpmz35pl89ftWoaZWWdOeigDowYMZTzzvshr7++hsmTr+H44+u/FW67bRY1NW+1xOmUlCJK9+5D0oaI6FJ4fDiwCrgzIiYWfgM2hfr7PB2AZyPiWkmdgCeBcuBF4N+pDwvAnIgYUDT/dOBUYA2wHqiKiOmS5gMTIqLJO3pHSDGmWc/WSm1S/c8k26/MWRIRO/2zl5Je+WwPT+HxO0Dnou11wFd38pyPgPOamHJAo7Gjm3jds/Z+tWbWktr038qYWR7Hx8xSOD5mlsLxMbMUjo+ZpXB8zCyF42NmKRwfM0vh+JhZCsfHzFI4PmaWwvExsxSOj5mlcHzMLIXjY2YpHB8zS+H4mFkKx8fMUjg+ZpbC8TGzFI6PmaVwfMwsheNjZikcHzNL4fiYWQrHx8xSOD5mlsLxMbMUjo+ZpXB8zCyF42NmKRwfM0vh+JhZCsfHzFI4PmaWwvExsxSOj5mlcHzMLIXjY2YpHB8zS+H4mFkKx8fMUjg+ZpbC8TGzFI6PmaVwfMwsheNjZikcHzNLoYjIXkMKqVvAGdnLsL1wK3Oyl2B7aRIsiYiTdnbMVz5mlsLxMbMUjo+ZpXB8zCyF42NmKRwfM0vh+JhZCsfHzFI4PmaWwvExsxSOj5mlcHzMLIXjY2YpHB8zS+H4mFkKx8fMUjg+ZpbC8TGzFI6PmaVwfMwsheNjZikcHzNL4fiYWQrHx8xSOD5mlsLxMbMUjo+ZpXB8zCyF42NmKRwfM0vh+JhZCsfHzFI4PmaWwvExsxSOj5mlcHzMLIXjY2YpHB8zS+H4mFkKx8fMUjg+ZpbC8TGzFI6PmaVwfMwsheNjZikcHzNL4fiYWQrHx8xSOD5mlsLxaeXOP/8E3nhj
KjU1D3LjjV9pctwll5xKxGxOPPFoALp378ozz/wXH3zwOPff/40dxl522elUV9/HsmVTuP32r5V0/fYvwx9+mO+88w7Xvfpqk2OGTZ7M+JoaxlZX03vw4BZcXcvbbXwkbdjJvmslXbGb542W9PMmjn2/0fbhkmZK+oukJZJekPTlwrGzJK2XtFTSK5LmSTqs6DVC0rlFc40o7Gv6O3U/0a5dO6ZMuZZhwyZy3HHjuPzyMzn22IpPjevSpRM33PAlFi58o2Hfpk0fc8stjzJhwiM7jO3evSt33fV1zjnnZgYMGEevXp/n7LMHlvxcDJZOn86vL7igyeOVw4bRvbKS+yormT1mDBdNndqCq2t5n+nKJyIeiIgZ+/C6DfGRJOC3wLMRcWREnAiMBPoUjV8QEYMiYiCwCBhXdOzVwvjtLgeq92FtrcYpp1SycuXfWbXqHbZs+YRZs55l+PAhnxr3ox+N4o47nmDTpi0N+zZu3Mxzzy3fYR/AkUf2oqbmLdatqwNg3rxqLr30tNKeiAGwesECPnrvvSaPHzN8ONUz6r+t1r74Igd360aXXr1aankt7jPFR9JESRMKj08uXJEslXSXpGVFQ4+Q9DtJNZLuLIy/HehUGP8ocDbwcUQ8sP1JEbE6Iu7fyesK6Ar8s2j3AuAUSR0ldQGOBpZ+lvNqbcrLD2XNmnUN22vXvkt5+aE7jBk8+CgqKnoyd+7iPZpz5cq3OOaYcvr2PYz27dsxYsRQKip6NOu67bMpKy+nbs2ahu26tWspKy9PXFFpdWiGOX4FXBMRLxTCUmwQMBjYDKyQdH9E3CTp+ogYBCBpPPDybl7jDElLgUOBDym6cgICmAecDxwCVAH9dzaJpDHAmPqtTnt2dq2YJO6++ypGj753j5/z/vsfMnbsL3jsse+ybVvw/POvc9RRvUu3SLMm7NMNZ0ndgK4R8UJh18xGQ34fEesjYhOwHOi7B3NOkVQtaVHR7u1vuyqoj92djZ42i/q3XiOB3zQ1d0Q8FBEnRcRJcNDulpKutvbdHa5K+vQ5lNradxu2u3btxIABfZk//yesWjWNoUOPoarq5oabzk2ZM2cRQ4dO4NRTv8OKFbW8+WZtyc7B9lxdbS1lFf+6p1fWpw91tQfu56bUv+3aXPR4Kzu/0noNOGH7RkSMA84BejYxZxVwZvGOiHgJ+CLQIyLe3JcFtyaLFtVQWXkE/fodTseOHRg58kyqql5qOF5Xt5GePUfRv//V9O9/NQsXruDii3/MkiUrdzlvz56HANCt2+e47roLmTbt6ZKeh+2ZFVVVHH9F/e9x+gwZwub169nw9tvJqyqdfXrbFRHvS/pA0pCIeJEdb/zuyhZJHSNiC/AM8BNJYyNi++39zrt47unAn3ey/yZg0x4vfj+wdes2rr/+AZ56ahLt27fjkUfmsXz535g0aRSLF9cwe/ZLu3z+qlXTKCvrzEEHdWDEiKGcd94Pef31NUyefA3HH1//zvS222ZRU/NWS5xOm3fpzJn0O+ssOvfowbfXrOEPt95K+44dAVj84IPUzJ1L5YUXMn7lSrZs3MiTV16ZvOLSUkTseoC0DSj+6rwbKAM2RMRPJQ0BfglsA/4InBQRp0kaXXh8fWGeOcBPI2K+pDuAi4GXI2KUpN7APcAQ4B/U39d5ICIek3QW8CSwChCwHrg6It5s/BpFa54OzImI/276vLoFnLG7fx9rRW5lTvYSbC9NgiX1tzk+bbfx2R1JXSJiQ+HxTUDviLhhnyZtAY7P/sfx2f/sKj7N8duuiyR9rzDXamB0M8xpZge4fY5PRDwGPNYMazGzNsT/t8vMUjg+ZpbC8TGzFI6PmaVwfMwsheNjZikcHzNL4fiYWQrHx8xSOD5mlsLxMbMUjo+ZpXB8zCyF42NmKRwfM0vh+JhZCsfHzFI4PmaWwvExsxSOj5mlcHzMLIXjY2YpHB8zS+H4mFkKx8fMUjg+ZpbC8TGzFI6PmaVwfMwsheNjZikcHzNL4fiYWQrHx8xSOD5mlsLxMbMUjo+ZpXB8zCyF42NmKRwfM0vh+JhZCsfHzFI4PmaWwvExsxSOj5mlcHzMLIXjY2YpHB8zS6GIyF5DCkn/AFZnr6NEegDrshdhe+VA/Zz1jYieOzvQZuNzIJO0OCJOyl6H7bm2+Dnz2y4zS+H4mFkKx+fA9FD2AmyvtbnPme/5mFkKX/mYWQrHx8xSOD6tkKSQ9LOi7QmSJrbA686X1KZ+3bunJG3Yyb5rJV2xm+eNlvTzJo59v9H24ZJmSvqLpCWSXpD05cKxsyStl7RU0iuS5kk6rOg1QtK5RXONKOz7ymc535bg+LROm4FLJPVozklVz5/zZhIRD0TEjH2YoiE+kgT8Fng2Io6MiBOBkUCfovELImJQRAwEFgHjio69Whi/3eVA9T6sreT8hdg6fUL9bz++1fiApJ6SnpC0qPBxWmH/REkTisYtk9Sv8LFC0gxgGVAhaaqkxZJekzSppU7qQFP8by7p5MIVyVJJd0laVjT0CEm/k1Qj6c7C+NuBToXxjwJnAx9HxAPbnxQRqyPi/p28roCuwD+Ldi8ATpHUUVIX4GhgaTOfcrNyfFqvKcAoSYc02j8ZuCciTgYuBabtwVyVwC8i4gsRsRr4QeGvaQcC/yFpYHMuvI36FfCNiBgEbG10bBDwVeCLwFclVUTETcBHhSuZUcAXgJd38xpnSFoK/A04F3ik6FgA84DzgeFA1T6dTQtwfFqpiKgDZgDjGx06F/h54YuwCigr/KTbldURsbBo+zJJLwN/ov6L/rjmWXXbJKkb0DUiXijsmtloyO8jYn1EbAKWA333YM4pkqolLSravf1tVwX1sbuz0dNmUf/WayTwm89wKi2qQ/YCbJfupf6n4a+K9rUDhha+kBtI+oQdf5gcXPT4w6Jx/YEJwMkR8U9J0xuNtea3uejxVnb+ffca9VeyAETEuMI9v8VNzFkFPFG8IyJekvRFYGNEvFn/7qz18pVPKxYR7wGPA1cV7X4a+Ob2DUmDCg//CpxQ2HcC0L+Jacuoj9F6SYcDw5p10W1QRLwPfCBpSGHXyF0ML7ZFUsfC42eAgyWNLTreeRfPPR34807230TRjezWzFc+rd/PgOuLtscDUyS9Qv3n71ngWup/Cl4h6TXgReDNnU0WEdWS/gS8AawBnivh2g8knSWtLdq+u9Hxq4BfStoG/BFYvwdzPgS8IunliBglaQRwj6TvAv+g/ofEjUXjt9/zUWH+qxtPGBH/u4fnk87/vcKsGUjqEhEbCo9vAnpHxA3Jy2rVfOVj1jwukvQ96r+nVgOjc5fT+vnKx8xS+IazmaVwfMwsheNjZikcHzNL4fiYWYr/B6P8xHBYHfiOAAAAAElFTkSuQmCC\n", - "text/plain": [ - "
" - ] - }, - "metadata": { - "needs_background": "light" - }, - "output_type": "display_data" - } - ], - "source": [ - "import matplotlib.pyplot as plt\n", - "\n", - "mc = predictor.runtime_analyzer['mixer_correlation'] # newly produced insight\n", - "\n", - "mixer_names = [c.__class__.__name__ for c in predictor.ensemble.mixers]\n", - "\n", - "# plotting code\n", - "fig, ax = plt.subplots()\n", - "im = ax.imshow(mc, cmap='seismic')\n", - "\n", - "# set ticks\n", - "ax.set_xticks(np.arange(mc.shape[0]))\n", - "ax.set_yticks(np.arange(mc.shape[1]))\n", - "\n", - "# set tick labels\n", - "ax.set_xticklabels(mixer_names)\n", - "ax.set_yticklabels(mixer_names)\n", - "\n", - "# show cell values\n", - "for i in range(len(mixer_names)):\n", - " for j in range(len(mixer_names)):\n", - " text = ax.text(j, i, round(mc[i, j], 3), ha=\"center\", va=\"center\", color=\"w\")\n" - ] - }, - { - "cell_type": "markdown", - "metadata": {}, - "source": [ - "Nice! We've just added an additional piece of insight regarding the predictor that Lightwood came up with for the task of predicting the Human Development Index of any given country.\n", - "\n", - "What this matrix is telling us is whether the predictions of both mixers stored in the ensemble -- Neural and LightGBM -- have a high correlation or not.\n", - "\n", - "This is, of course, a very simple example, but it shows the convenience of such an abstraction within the broader pipeline that Lightwood automates.\n", - "\n", - "For more complex examples, you can check out any of the three core analysis blocks that we use:\n", - "\n", - "* `lightwood.analysis.nc.calibrate.ICP`\n", - "* `lightwood.analysis.helpers.acc_stats.AccStats`\n", - "* `lightwood.analysis.helpers.feature_importance.GlobalFeatureImportance`\n" - ] - }, - { - "cell_type": "code", - "execution_count": null, - "metadata": {}, - "outputs": [], - "source": [] - } - ], - "metadata": { - "kernelspec": { - "display_name": "mdb", - "language": "python", - "name": "mdb" - }, - "language_info": { - "codemirror_mode": { - "name": "ipython", - "version": 3 - }, - "file_extension": ".py", - "mimetype": "text/x-python", - "name": "python", - "nbconvert_exporter": "python", - "pygments_lexer": "ipython3", - "version": "3.8.10" - } - }, - "nbformat": 4, - "nbformat_minor": 4 -} diff --git a/docs/tutorials/custom_mixer/custom_mixer.html b/docs/tutorials/custom_mixer/custom_mixer.html deleted file mode 100644 index b218e0497..000000000 --- a/docs/tutorials/custom_mixer/custom_mixer.html +++ /dev/null @@ -1,942 +0,0 @@ - - - - - - - - - - Tutorial - Implementing a custom mixer in Lightwood — lightwood 1.6.1 documentation - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - -

Tutorial - Implementing a custom mixer in Lightwood

-
-

Introduction

-

Mixers are the centerpiece of Lightwood, tasked with learning the mapping between the encoded feature and target representations.

-
-
-

Objective

-

In this tutorial we’ll implement an sklearn random forest as a mixer that handles categorical and binary targets.

-
-
-

Step 1: The Mixer Interface

-

The Mixer interface is defined by the BaseMixer class. A mixer needs methods for four tasks: fitting (fit), predicting (__call__), construction (__init__), and partial fitting (partial_fit), though the last one is optional. A minimal skeleton of that contract is sketched below.
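To make that contract concrete before the full implementation later in this tutorial, here is a minimal skeleton of a conforming mixer. This is a sketch only: the method bodies are placeholders, and the signatures mirror the ones used in the implementation below.

from lightwood.mixer import BaseMixer
from lightwood.api.types import PredictionArguments
from lightwood.data.encoded_ds import EncodedDs
import pandas as pd

class SkeletonMixer(BaseMixer):
    def __init__(self, stop_after: int):
        # Construction: store any configuration; `stop_after` is the mixer's time budget in seconds
        super().__init__(stop_after)

    def fit(self, train_data: EncodedDs, dev_data: EncodedDs) -> None:
        # Fitting: learn the mapping from encoded features to the target
        pass

    def __call__(self, ds: EncodedDs,
                 args: PredictionArguments = PredictionArguments()) -> pd.DataFrame:
        # Predicting: must return a dataframe with a `prediction` column
        return pd.DataFrame({'prediction': [None] * len(ds)})

    def partial_fit(self, train_data: EncodedDs, dev_data: EncodedDs) -> None:
        # Optional: update an already-fitted mixer with new data (used for online training)
        pass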

-
-
-

Step 2: Writing our mixer

-

I’m going to create a file called random_forest_mixer.py inside /etc/lightwood_modules; this is where Lightwood sources custom modules from.

-

Inside of it I’m going to write the following code:

-
-
[1]:
-
-
-
-from lightwood.mixer import BaseMixer
-from lightwood.api.types import PredictionArguments
-from lightwood.data.encoded_ds import EncodedDs, ConcatedEncodedDs
-from lightwood import dtype
-from lightwood.encoder import BaseEncoder
-
-import torch
-import pandas as pd
-from sklearn.ensemble import RandomForestClassifier
-
-
-class RandomForestMixer(BaseMixer):
-    clf: RandomForestClassifier
-
-    def __init__(self, stop_after: int, dtype_dict: dict, target: str, target_encoder: BaseEncoder):
-        super().__init__(stop_after)
-        self.target_encoder = target_encoder
-        # Throw in case someone tries to use this for a problem that's not classification, I'd fail anyway, but this way the error message is more intuitive
-        if dtype_dict[target] not in (dtype.categorical, dtype.binary):
-            raise Exception(f'This mixer can only be used for classification problems! Got target dtype {dtype_dict[target]} instead!')
-
-        # We could also initialize this in `fit` if some of the parameters depend on the input data, since `fit` is called exactly once
-        self.clf = RandomForestClassifier(max_depth=30)
-
-    def fit(self, train_data: EncodedDs, dev_data: EncodedDs) -> None:
-        X, Y = [], []
-        # By default mixers get some train data and a bit of dev data on which to do early stopping or hyperparameter optimization. For this mixer, we don't need dev data, so we're going to concat the two in order to get more training data. Then, we're going to turn them into an sklearn-friendly format.
-        for x, y in ConcatedEncodedDs([train_data, dev_data]):
-            X.append(x.tolist())
-            Y.append(y.tolist())
-        self.clf.fit(X, Y)
-
-    def __call__(self, ds: EncodedDs,
-                 args: PredictionArguments = PredictionArguments()) -> pd.DataFrame:
-        # Turn the data into an sklearn friendly format
-        X = []
-        for x, _ in ds:
-            X.append(x.tolist())
-
-        Yh = self.clf.predict(X)
-
-        # Lightwood encoders are meant to decode torch tensors, so we have to cast the predictions first
-        decoded_predictions = self.target_encoder.decode(torch.Tensor(Yh))
-
-        # Finally, turn the decoded predictions into a dataframe with a single column called `prediction`. This is the standard behaviour all lightwood mixers use
-        ydf = pd.DataFrame({'prediction': decoded_predictions})
-
-        return ydf
-
-
-    # We'll skip implementing `partial_fit`, thus making this mixer unsuitable for online training tasks
-
-
-
-
-
-

Step 3: Using our mixer

-

We’re going to use our mixer for diagnosing heart disease using this dataset: https://github.com/mindsdb/benchmarks/blob/main/benchmarks/datasets/heart_disease/data.csv

-

First, since we don’t want to bother writing a JSON-AI specification for this dataset from scratch, we’re going to let Lightwood auto-generate one.

-
-
[2]:
-
-
-
-from lightwood.api.high_level import ProblemDefinition, json_ai_from_problem
-import pandas as pd
-
-# read dataset
-df = pd.read_csv('https://raw.githubusercontent.com/mindsdb/benchmarks/main/benchmarks/datasets/heart_disease/data.csv')
-
-# define the predictive task
-pdef = ProblemDefinition.from_dict({
-    'target': 'target', # column you want to predict
-})
-
-# generate the Json AI intermediate representation from the data and its corresponding settings
-json_ai = json_ai_from_problem(df, problem_definition=pdef)
-
-# Print it (you can also put it in a file and edit it there)
-print(json_ai.to_json())
-
-
-
-
-
-
-
-
-INFO:lightwood-56096:Dropping features: []
-INFO:lightwood-56096:Analyzing a sample of 298
-INFO:lightwood-56096:from a total population of 303, this is equivalent to 98.3% of your data.
-INFO:lightwood-56096:Using 15 processes to deduct types.
-INFO:lightwood-56096:Infering type for: age
-INFO:lightwood-56096:Infering type for: sex
-INFO:lightwood-56096:Infering type for: cp
-INFO:lightwood-56096:Infering type for: trestbps
-INFO:lightwood-56096:Infering type for: fbs
-INFO:lightwood-56096:Infering type for: chol
-INFO:lightwood-56096:Infering type for: thalach
-INFO:lightwood-56096:Infering type for: restecg
-INFO:lightwood-56096:Infering type for: exang
-INFO:lightwood-56096:Infering type for: ca
-INFO:lightwood-56096:Infering type for: slope
-INFO:lightwood-56096:Infering type for: thal
-INFO:lightwood-56096:Column age has data type integer
-INFO:lightwood-56096:Infering type for: target
-INFO:lightwood-56096:Column sex has data type binary
-INFO:lightwood-56096:Column fbs has data type binary
-INFO:lightwood-56096:Column cp has data type categorical
-INFO:lightwood-56096:Infering type for: oldpeak
-INFO:lightwood-56096:Column trestbps has data type integer
-INFO:lightwood-56096:Column chol has data type integer
-INFO:lightwood-56096:Column thalach has data type integer
-INFO:lightwood-56096:Column restecg has data type categorical
-INFO:lightwood-56096:Column exang has data type binary
-INFO:lightwood-56096:Column ca has data type categorical
-INFO:lightwood-56096:Column slope has data type categorical
-INFO:lightwood-56096:Column thal has data type categorical
-INFO:lightwood-56096:Column target has data type binary
-INFO:lightwood-56096:Column oldpeak has data type float
-INFO:lightwood-56096:Starting statistical analysis
-INFO:lightwood-56096:Finished statistical analysis
-
-
-
-
-
-
-
-random_forest_mixer.py
-random_forest_mixer
-{
-    "features": {
-        "age": {
-            "encoder": {
-                "module": "Integer.NumericEncoder",
-                "args": {}
-            }
-        },
-        "sex": {
-            "encoder": {
-                "module": "Binary.BinaryEncoder",
-                "args": {}
-            }
-        },
-        "cp": {
-            "encoder": {
-                "module": "Categorical.OneHotEncoder",
-                "args": {}
-            }
-        },
-        "trestbps": {
-            "encoder": {
-                "module": "Integer.NumericEncoder",
-                "args": {}
-            }
-        },
-        "chol": {
-            "encoder": {
-                "module": "Integer.NumericEncoder",
-                "args": {}
-            }
-        },
-        "fbs": {
-            "encoder": {
-                "module": "Binary.BinaryEncoder",
-                "args": {}
-            }
-        },
-        "restecg": {
-            "encoder": {
-                "module": "Categorical.OneHotEncoder",
-                "args": {}
-            }
-        },
-        "thalach": {
-            "encoder": {
-                "module": "Integer.NumericEncoder",
-                "args": {}
-            }
-        },
-        "exang": {
-            "encoder": {
-                "module": "Binary.BinaryEncoder",
-                "args": {}
-            }
-        },
-        "oldpeak": {
-            "encoder": {
-                "module": "Float.NumericEncoder",
-                "args": {}
-            }
-        },
-        "slope": {
-            "encoder": {
-                "module": "Categorical.OneHotEncoder",
-                "args": {}
-            }
-        },
-        "ca": {
-            "encoder": {
-                "module": "Categorical.OneHotEncoder",
-                "args": {}
-            }
-        },
-        "thal": {
-            "encoder": {
-                "module": "Categorical.OneHotEncoder",
-                "args": {}
-            }
-        }
-    },
-    "outputs": {
-        "target": {
-            "data_dtype": "binary",
-            "encoder": {
-                "module": "Binary.BinaryEncoder",
-                "args": {
-                    "is_target": "True",
-                    "target_class_distribution": "$statistical_analysis.target_class_distribution"
-                }
-            },
-            "mixers": [
-                {
-                    "module": "Neural",
-                    "args": {
-                        "fit_on_dev": true,
-                        "stop_after": "$problem_definition.seconds_per_mixer",
-                        "search_hyperparameters": true
-                    }
-                },
-                {
-                    "module": "LightGBM",
-                    "args": {
-                        "stop_after": "$problem_definition.seconds_per_mixer",
-                        "fit_on_dev": true
-                    }
-                },
-                {
-                    "module": "Regression",
-                    "args": {
-                        "stop_after": "$problem_definition.seconds_per_mixer"
-                    }
-                }
-            ],
-            "ensemble": {
-                "module": "BestOf",
-                "args": {
-                    "args": "$pred_args",
-                    "accuracy_functions": "$accuracy_functions",
-                    "ts_analysis": null
-                }
-            }
-        }
-    },
-    "problem_definition": {
-        "target": "target",
-        "pct_invalid": 2,
-        "unbias_target": true,
-        "seconds_per_mixer": 2364,
-        "seconds_per_encoder": 0,
-        "time_aim": 10642.1306731291,
-        "target_weights": null,
-        "positive_domain": false,
-        "timeseries_settings": {
-            "is_timeseries": false,
-            "order_by": null,
-            "window": null,
-            "group_by": null,
-            "use_previous_target": true,
-            "nr_predictions": null,
-            "historical_columns": null,
-            "target_type": "",
-            "allow_incomplete_history": false
-        },
-        "anomaly_detection": true,
-        "ignore_features": [],
-        "fit_on_all": true,
-        "strict_mode": true,
-        "seed_nr": 420
-    },
-    "identifiers": {},
-    "accuracy_functions": [
-        "balanced_accuracy_score"
-    ]
-}
-
-
-

Now we have to edit the mixers key of this JSON-AI to tell Lightwood to use our custom mixer. We can use it together with the others and have it ensembled with them at the end, or standalone. In this case I’m going to replace all existing mixers with this one.

-
-
[3]:
-
-
-
-json_ai.outputs['target'].mixers = [{
-    'module': 'random_forest_mixer.RandomForestMixer',
-    'args': {
-        'stop_after': '$problem_definition.seconds_per_mixer',
-        'dtype_dict': '$dtype_dict',
-        'target': '$target',
-                'target_encoder': '$encoders[self.target]'
-
-    }
-}]
-
-
-
-

Then we’ll generate some code, and finally turn that code into a predictor object and fit it on the original data.

-
-
[4]:
-
-
-
-from lightwood.api.high_level import code_from_json_ai, predictor_from_code
-
-code = code_from_json_ai(json_ai)
-predictor = predictor_from_code(code)
-
-
-
-
-
-
-
-
-random_forest_mixer.py
-random_forest_mixer
-
-
-
-
[5]:
-
-
-
-predictor.learn(df)
-
-
-
-
-
-
-
-
-INFO:lightwood-56096:Dropping features: []
-INFO:lightwood-56096:Performing statistical analysis on data
-INFO:lightwood-56096:Starting statistical analysis
-INFO:lightwood-56096:Finished statistical analysis
-INFO:lightwood-56096:Cleaning the data
-INFO:lightwood-56096:Splitting the data into train/test
-INFO:lightwood-56096:Preparing the encoders
-INFO:lightwood-56096:Encoder prepping dict length of: 1
-INFO:lightwood-56096:Encoder prepping dict length of: 2
-INFO:lightwood-56096:Encoder prepping dict length of: 3
-INFO:lightwood-56096:Encoder prepping dict length of: 4
-INFO:lightwood-56096:Encoder prepping dict length of: 5
-INFO:lightwood-56096:Encoder prepping dict length of: 6
-INFO:lightwood-56096:Encoder prepping dict length of: 7
-INFO:lightwood-56096:Encoder prepping dict length of: 8
-INFO:lightwood-56096:Encoder prepping dict length of: 9
-INFO:lightwood-56096:Encoder prepping dict length of: 10
-INFO:lightwood-56096:Encoder prepping dict length of: 11
-INFO:lightwood-56096:Encoder prepping dict length of: 12
-INFO:lightwood-56096:Encoder prepping dict length of: 13
-INFO:lightwood-56096:Encoder prepping dict length of: 14
-INFO:lightwood-56096:Done running for: target
-INFO:lightwood-56096:Done running for: age
-INFO:lightwood-56096:Done running for: sex
-INFO:lightwood-56096:Done running for: cp
-INFO:lightwood-56096:Done running for: trestbps
-INFO:lightwood-56096:Done running for: chol
-INFO:lightwood-56096:Done running for: fbs
-INFO:lightwood-56096:Done running for: restecg
-INFO:lightwood-56096:Done running for: thalach
-INFO:lightwood-56096:Done running for: exang
-INFO:lightwood-56096:Done running for: oldpeak
-INFO:lightwood-56096:Done running for: slope
-INFO:lightwood-56096:Done running for: ca
-INFO:lightwood-56096:Done running for: thal
-INFO:lightwood-56096:Featurizing the data
-INFO:lightwood-56096:Training the mixers
-INFO:lightwood-56096:Ensembling the mixer
-INFO:lightwood-56096:Mixer: RandomForestMixer got accuracy: 0.8149038461538461
-INFO:lightwood-56096:Picked best mixer: RandomForestMixer
-INFO:lightwood-56096:Analyzing the ensemble of mixers
-INFO:lightwood-56096:Adjustment on validation requested.
-INFO:lightwood-56096:Updating the mixers
-
-
-

Finally, we can use the trained predictor to make some predictions, or save it to a pickle for later use.

-
-
[6]:
-
-
-
-predictions = predictor.predict(pd.DataFrame({
-    'age': [63, 15, None],
-    'sex': [1, 1, 0],
-    'thal': [3, 1, 1]
-}))
-print(predictions)
-
-predictor.save('my_custom_heart_disease_predictor.pickle')
-
-
-
-
-
-
-
-
-INFO:lightwood-56096:Dropping features: []
-INFO:lightwood-56096:Cleaning the data
-INFO:lightwood-56096:AccStats.explain() has not been implemented, no modifications will be done to the data insights.
-INFO:lightwood-56096:GlobalFeatureImportance.explain() has not been implemented, no modifications will be done to the data insights.
-
-
-
-
-
-
-
-  prediction truth  confidence
-0          0  None        0.95
-1          0  None        0.94
-2          1  None        0.97
-
-
-

That’s it; that’s all it takes to solve a predictive problem with Lightwood using your own custom mixer.
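If you want to reload that pickle in a later session, recent Lightwood versions expose a predictor_from_state helper in lightwood.api.high_level; whether it is available depends on your version, so treat this as a sketch rather than part of the tutorial. It assumes the generated module source (the code variable from code_from_json_ai) is still around.

from lightwood.api.high_level import predictor_from_state

# Reload the predictor saved above; `code` is the module source generated earlier
predictor = predictor_from_state('my_custom_heart_disease_predictor.pickle', code)
print(predictor.predict(pd.DataFrame({'age': [63], 'sex': [1], 'thal': [3]})))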

- - - - - - - - - - - \ No newline at end of file diff --git a/docs/tutorials/custom_mixer/custom_mixer.ipynb b/docs/tutorials/custom_mixer/custom_mixer.ipynb deleted file mode 100644 index 0e5706b91..000000000 --- a/docs/tutorials/custom_mixer/custom_mixer.ipynb +++ /dev/null @@ -1,530 +0,0 @@ -{ - "cells": [ - { - "cell_type": "markdown", - "metadata": {}, - "source": [ - "# Tutorial - Implementing a custom mixer in Lightwood\n", - "\n", - "\n", - "## Introduction\n", - "\n", - "Mixers are the center piece of lightwood, tasked with learning the mapping between the encoded feature and target representation\n", - "\n", - "\n", - "## Objective\n", - "\n", - "In this tutorial we'll be trying to implement a sklearn random forest as a mixer that handles categorical and binary targets. " - ] - }, - { - "cell_type": "markdown", - "metadata": {}, - "source": [ - "## Step 1: The Mixer Interface\n", - "\n", - "The Mixer interface is defined by the `BaseMixer` class, a mixer needs methods for 4 tasks:\n", - "* fitting (`fit`)\n", - "* predicting (`__call__`)\n", - "* construction (`__init__`)\n", - "* partial fitting (`partial_fit`), though this one is optional" - ] - }, - { - "cell_type": "markdown", - "metadata": {}, - "source": [ - "## Step 2: Writing our mixer\n", - "\n", - "I'm going to create a file called `random_forest_mixer.py` inside `/etc/lightwood_modules`, this is where lightwood sources custom modules from.\n", - "\n", - "Inside of it I'm going to write the following code:" - ] - }, - { - "cell_type": "code", - "execution_count": 1, - "metadata": {}, - "outputs": [], - "source": [ - "from lightwood.mixer import BaseMixer\n", - "from lightwood.api.types import PredictionArguments\n", - "from lightwood.data.encoded_ds import EncodedDs, ConcatedEncodedDs\n", - "from lightwood import dtype\n", - "from lightwood.encoder import BaseEncoder\n", - "\n", - "import torch\n", - "import pandas as pd\n", - "from sklearn.ensemble import RandomForestClassifier\n", - "\n", - "\n", - "class RandomForestMixer(BaseMixer):\n", - " clf: RandomForestClassifier\n", - "\n", - " def __init__(self, stop_after: int, dtype_dict: dict, target: str, target_encoder: BaseEncoder):\n", - " super().__init__(stop_after)\n", - " self.target_encoder = target_encoder\n", - " # Throw in case someone tries to use this for a problem that's not classification, I'd fail anyway, but this way the error message is more intuitive\n", - " if dtype_dict[target] not in (dtype.categorical, dtype.binary):\n", - " raise Exception(f'This mixer can only be used for classification problems! Got target dtype {dtype_dict[target]} instead!')\n", - "\n", - " # We could also initialize this in `fit` if some of the parameters depend on the input data, since `fit` is called exactly once\n", - " self.clf = RandomForestClassifier(max_depth=30)\n", - "\n", - " def fit(self, train_data: EncodedDs, dev_data: EncodedDs) -> None:\n", - " X, Y = [], []\n", - " # By default mixers get some train data and a bit of dev data on which to do early stopping or hyper parameter optimization. For this mixer, we don't need dev data, so we're going to concat the two in order to get more training data. 
Then, we're going to turn them into an sklearn friendly foramat.\n", - " for x, y in ConcatedEncodedDs([train_data, dev_data]):\n", - " X.append(x.tolist())\n", - " Y.append(y.tolist())\n", - " self.clf.fit(X, Y)\n", - "\n", - " def __call__(self, ds: EncodedDs,\n", - " args: PredictionArguments = PredictionArguments()) -> pd.DataFrame:\n", - " # Turn the data into an sklearn friendly format\n", - " X = []\n", - " for x, _ in ds:\n", - " X.append(x.tolist())\n", - "\n", - " Yh = self.clf.predict(X)\n", - "\n", - " # Lightwood encoders are meant to decode torch tensors, so we have to cast the predictions first\n", - " decoded_predictions = self.target_encoder.decode(torch.Tensor(Yh))\n", - "\n", - " # Finally, turn the decoded predictions into a dataframe with a single column called `prediction`. This is the standard behaviour all lightwood mixers use\n", - " ydf = pd.DataFrame({'prediction': decoded_predictions})\n", - "\n", - " return ydf\n", - "\n", - " \n", - " # We'll skip implementing `partial_fit`, thus making this mixer unsuitable for online training tasks" - ] - }, - { - "cell_type": "markdown", - "metadata": {}, - "source": [ - "## Step 3: Using our mixer\n", - "\n", - "We're going to use our mixer for diagnosing heart disease using this dataset: [https://github.com/mindsdb/benchmarks/blob/main/benchmarks/datasets/heart_disease/data.csv](https://github.com/mindsdb/benchmarks/blob/main/benchmarks/datasets/heart_disease/data.csv)\n", - "\n", - "First, since we don't want to bother writing a Json AI for this dataset from scratch, we're going to let lightwood auto generate one." - ] - }, - { - "cell_type": "code", - "execution_count": 2, - "metadata": {}, - "outputs": [ - { - "name": "stderr", - "output_type": "stream", - "text": [ - "INFO:lightwood-56096:Dropping features: []\n", - "INFO:lightwood-56096:Analyzing a sample of 298\n", - "INFO:lightwood-56096:from a total population of 303, this is equivalent to 98.3% of your data.\n", - "INFO:lightwood-56096:Using 15 processes to deduct types.\n", - "INFO:lightwood-56096:Infering type for: age\n", - "INFO:lightwood-56096:Infering type for: sex\n", - "INFO:lightwood-56096:Infering type for: cp\n", - "INFO:lightwood-56096:Infering type for: trestbps\n", - "INFO:lightwood-56096:Infering type for: fbs\n", - "INFO:lightwood-56096:Infering type for: chol\n", - "INFO:lightwood-56096:Infering type for: thalach\n", - "INFO:lightwood-56096:Infering type for: restecg\n", - "INFO:lightwood-56096:Infering type for: exang\n", - "INFO:lightwood-56096:Infering type for: ca\n", - "INFO:lightwood-56096:Infering type for: slope\n", - "INFO:lightwood-56096:Infering type for: thal\n", - "INFO:lightwood-56096:Column age has data type integer\n", - "INFO:lightwood-56096:Infering type for: target\n", - "INFO:lightwood-56096:Column sex has data type binary\n", - "INFO:lightwood-56096:Column fbs has data type binary\n", - "INFO:lightwood-56096:Column cp has data type categorical\n", - "INFO:lightwood-56096:Infering type for: oldpeak\n", - "INFO:lightwood-56096:Column trestbps has data type integer\n", - "INFO:lightwood-56096:Column chol has data type integer\n", - "INFO:lightwood-56096:Column thalach has data type integer\n", - "INFO:lightwood-56096:Column restecg has data type categorical\n", - "INFO:lightwood-56096:Column exang has data type binary\n", - "INFO:lightwood-56096:Column ca has data type categorical\n", - "INFO:lightwood-56096:Column slope has data type categorical\n", - "INFO:lightwood-56096:Column thal has data type categorical\n", - 
"INFO:lightwood-56096:Column target has data type binary\n", - "INFO:lightwood-56096:Column oldpeak has data type float\n", - "INFO:lightwood-56096:Starting statistical analysis\n", - "INFO:lightwood-56096:Finished statistical analysis\n" - ] - }, - { - "name": "stdout", - "output_type": "stream", - "text": [ - "random_forest_mixer.py\n", - "random_forest_mixer\n", - "{\n", - " \"features\": {\n", - " \"age\": {\n", - " \"encoder\": {\n", - " \"module\": \"Integer.NumericEncoder\",\n", - " \"args\": {}\n", - " }\n", - " },\n", - " \"sex\": {\n", - " \"encoder\": {\n", - " \"module\": \"Binary.BinaryEncoder\",\n", - " \"args\": {}\n", - " }\n", - " },\n", - " \"cp\": {\n", - " \"encoder\": {\n", - " \"module\": \"Categorical.OneHotEncoder\",\n", - " \"args\": {}\n", - " }\n", - " },\n", - " \"trestbps\": {\n", - " \"encoder\": {\n", - " \"module\": \"Integer.NumericEncoder\",\n", - " \"args\": {}\n", - " }\n", - " },\n", - " \"chol\": {\n", - " \"encoder\": {\n", - " \"module\": \"Integer.NumericEncoder\",\n", - " \"args\": {}\n", - " }\n", - " },\n", - " \"fbs\": {\n", - " \"encoder\": {\n", - " \"module\": \"Binary.BinaryEncoder\",\n", - " \"args\": {}\n", - " }\n", - " },\n", - " \"restecg\": {\n", - " \"encoder\": {\n", - " \"module\": \"Categorical.OneHotEncoder\",\n", - " \"args\": {}\n", - " }\n", - " },\n", - " \"thalach\": {\n", - " \"encoder\": {\n", - " \"module\": \"Integer.NumericEncoder\",\n", - " \"args\": {}\n", - " }\n", - " },\n", - " \"exang\": {\n", - " \"encoder\": {\n", - " \"module\": \"Binary.BinaryEncoder\",\n", - " \"args\": {}\n", - " }\n", - " },\n", - " \"oldpeak\": {\n", - " \"encoder\": {\n", - " \"module\": \"Float.NumericEncoder\",\n", - " \"args\": {}\n", - " }\n", - " },\n", - " \"slope\": {\n", - " \"encoder\": {\n", - " \"module\": \"Categorical.OneHotEncoder\",\n", - " \"args\": {}\n", - " }\n", - " },\n", - " \"ca\": {\n", - " \"encoder\": {\n", - " \"module\": \"Categorical.OneHotEncoder\",\n", - " \"args\": {}\n", - " }\n", - " },\n", - " \"thal\": {\n", - " \"encoder\": {\n", - " \"module\": \"Categorical.OneHotEncoder\",\n", - " \"args\": {}\n", - " }\n", - " }\n", - " },\n", - " \"outputs\": {\n", - " \"target\": {\n", - " \"data_dtype\": \"binary\",\n", - " \"encoder\": {\n", - " \"module\": \"Binary.BinaryEncoder\",\n", - " \"args\": {\n", - " \"is_target\": \"True\",\n", - " \"target_class_distribution\": \"$statistical_analysis.target_class_distribution\"\n", - " }\n", - " },\n", - " \"mixers\": [\n", - " {\n", - " \"module\": \"Neural\",\n", - " \"args\": {\n", - " \"fit_on_dev\": true,\n", - " \"stop_after\": \"$problem_definition.seconds_per_mixer\",\n", - " \"search_hyperparameters\": true\n", - " }\n", - " },\n", - " {\n", - " \"module\": \"LightGBM\",\n", - " \"args\": {\n", - " \"stop_after\": \"$problem_definition.seconds_per_mixer\",\n", - " \"fit_on_dev\": true\n", - " }\n", - " },\n", - " {\n", - " \"module\": \"Regression\",\n", - " \"args\": {\n", - " \"stop_after\": \"$problem_definition.seconds_per_mixer\"\n", - " }\n", - " }\n", - " ],\n", - " \"ensemble\": {\n", - " \"module\": \"BestOf\",\n", - " \"args\": {\n", - " \"args\": \"$pred_args\",\n", - " \"accuracy_functions\": \"$accuracy_functions\",\n", - " \"ts_analysis\": null\n", - " }\n", - " }\n", - " }\n", - " },\n", - " \"problem_definition\": {\n", - " \"target\": \"target\",\n", - " \"pct_invalid\": 2,\n", - " \"unbias_target\": true,\n", - " \"seconds_per_mixer\": 2364,\n", - " \"seconds_per_encoder\": 0,\n", - " \"time_aim\": 10642.1306731291,\n", - " 
\"target_weights\": null,\n", - " \"positive_domain\": false,\n", - " \"timeseries_settings\": {\n", - " \"is_timeseries\": false,\n", - " \"order_by\": null,\n", - " \"window\": null,\n", - " \"group_by\": null,\n", - " \"use_previous_target\": true,\n", - " \"nr_predictions\": null,\n", - " \"historical_columns\": null,\n", - " \"target_type\": \"\",\n", - " \"allow_incomplete_history\": false\n", - " },\n", - " \"anomaly_detection\": true,\n", - " \"ignore_features\": [],\n", - " \"fit_on_all\": true,\n", - " \"strict_mode\": true,\n", - " \"seed_nr\": 420\n", - " },\n", - " \"identifiers\": {},\n", - " \"accuracy_functions\": [\n", - " \"balanced_accuracy_score\"\n", - " ]\n", - "}\n" - ] - } - ], - "source": [ - "from lightwood.api.high_level import ProblemDefinition, json_ai_from_problem\n", - "import pandas as pd\n", - "\n", - "# read dataset\n", - "df = pd.read_csv('https://raw.githubusercontent.com/mindsdb/benchmarks/main/benchmarks/datasets/heart_disease/data.csv')\n", - "\n", - "# define the predictive task\n", - "pdef = ProblemDefinition.from_dict({\n", - " 'target': 'target', # column you want to predict\n", - "})\n", - "\n", - "# generate the Json AI intermediate representation from the data and its corresponding settings\n", - "json_ai = json_ai_from_problem(df, problem_definition=pdef)\n", - "\n", - "# Print it (you can also put it in a file and edit it there)\n", - "print(json_ai.to_json())" - ] - }, - { - "cell_type": "markdown", - "metadata": {}, - "source": [ - "Now we have to edit the `mixers` key of this json ai to tell lightwood to use our custom mixer. We can use it together with the others, and have it ensembled with them at the end, or standalone. In this case I'm going to replace all existing mixers with this one" - ] - }, - { - "cell_type": "code", - "execution_count": 3, - "metadata": {}, - "outputs": [], - "source": [ - "json_ai.outputs['target'].mixers = [{\n", - " 'module': 'random_forest_mixer.RandomForestMixer',\n", - " 'args': {\n", - " 'stop_after': '$problem_definition.seconds_per_mixer',\n", - " 'dtype_dict': '$dtype_dict',\n", - " 'target': '$target',\n", - " 'target_encoder': '$encoders[self.target]'\n", - "\n", - " }\n", - "}]" - ] - }, - { - "cell_type": "markdown", - "metadata": {}, - "source": [ - "Then we'll generate some code, and finally turn that code into a predictor object and fit it on the original data." 
- ] - }, - { - "cell_type": "code", - "execution_count": 4, - "metadata": {}, - "outputs": [ - { - "name": "stdout", - "output_type": "stream", - "text": [ - "random_forest_mixer.py\n", - "random_forest_mixer\n" - ] - } - ], - "source": [ - "from lightwood.api.high_level import code_from_json_ai, predictor_from_code\n", - "\n", - "code = code_from_json_ai(json_ai)\n", - "predictor = predictor_from_code(code)" - ] - }, - { - "cell_type": "code", - "execution_count": 5, - "metadata": {}, - "outputs": [ - { - "name": "stderr", - "output_type": "stream", - "text": [ - "INFO:lightwood-56096:Dropping features: []\n", - "INFO:lightwood-56096:Performing statistical analysis on data\n", - "INFO:lightwood-56096:Starting statistical analysis\n", - "INFO:lightwood-56096:Finished statistical analysis\n", - "INFO:lightwood-56096:Cleaning the data\n", - "INFO:lightwood-56096:Splitting the data into train/test\n", - "INFO:lightwood-56096:Preparing the encoders\n", - "INFO:lightwood-56096:Encoder prepping dict length of: 1\n", - "INFO:lightwood-56096:Encoder prepping dict length of: 2\n", - "INFO:lightwood-56096:Encoder prepping dict length of: 3\n", - "INFO:lightwood-56096:Encoder prepping dict length of: 4\n", - "INFO:lightwood-56096:Encoder prepping dict length of: 5\n", - "INFO:lightwood-56096:Encoder prepping dict length of: 6\n", - "INFO:lightwood-56096:Encoder prepping dict length of: 7\n", - "INFO:lightwood-56096:Encoder prepping dict length of: 8\n", - "INFO:lightwood-56096:Encoder prepping dict length of: 9\n", - "INFO:lightwood-56096:Encoder prepping dict length of: 10\n", - "INFO:lightwood-56096:Encoder prepping dict length of: 11\n", - "INFO:lightwood-56096:Encoder prepping dict length of: 12\n", - "INFO:lightwood-56096:Encoder prepping dict length of: 13\n", - "INFO:lightwood-56096:Encoder prepping dict length of: 14\n", - "INFO:lightwood-56096:Done running for: target\n", - "INFO:lightwood-56096:Done running for: age\n", - "INFO:lightwood-56096:Done running for: sex\n", - "INFO:lightwood-56096:Done running for: cp\n", - "INFO:lightwood-56096:Done running for: trestbps\n", - "INFO:lightwood-56096:Done running for: chol\n", - "INFO:lightwood-56096:Done running for: fbs\n", - "INFO:lightwood-56096:Done running for: restecg\n", - "INFO:lightwood-56096:Done running for: thalach\n", - "INFO:lightwood-56096:Done running for: exang\n", - "INFO:lightwood-56096:Done running for: oldpeak\n", - "INFO:lightwood-56096:Done running for: slope\n", - "INFO:lightwood-56096:Done running for: ca\n", - "INFO:lightwood-56096:Done running for: thal\n", - "INFO:lightwood-56096:Featurizing the data\n", - "INFO:lightwood-56096:Training the mixers\n", - "INFO:lightwood-56096:Ensembling the mixer\n", - "INFO:lightwood-56096:Mixer: RandomForestMixer got accuracy: 0.8149038461538461\n", - "INFO:lightwood-56096:Picked best mixer: RandomForestMixer\n", - "INFO:lightwood-56096:Analyzing the ensemble of mixers\n", - "INFO:lightwood-56096:Adjustment on validation requested.\n", - "INFO:lightwood-56096:Updating the mixers\n" - ] - } - ], - "source": [ - "predictor.learn(df)" - ] - }, - { - "cell_type": "markdown", - "metadata": {}, - "source": [ - "Finally, we can use the trained predictor to make some predictions, or save it to a pickle for later use" - ] - }, - { - "cell_type": "code", - "execution_count": 6, - "metadata": {}, - "outputs": [ - { - "name": "stderr", - "output_type": "stream", - "text": [ - "INFO:lightwood-56096:Dropping features: []\n", - "INFO:lightwood-56096:Cleaning the data\n", - 
"INFO:lightwood-56096:AccStats.explain() has not been implemented, no modifications will be done to the data insights.\n", - "INFO:lightwood-56096:GlobalFeatureImportance.explain() has not been implemented, no modifications will be done to the data insights.\n" - ] - }, - { - "name": "stdout", - "output_type": "stream", - "text": [ - " prediction truth confidence\n", - "0 0 None 0.95\n", - "1 0 None 0.94\n", - "2 1 None 0.97\n" - ] - } - ], - "source": [ - "predictions = predictor.predict(pd.DataFrame({\n", - " 'age': [63, 15, None],\n", - " 'sex': [1, 1, 0],\n", - " 'thal': [3, 1, 1]\n", - "}))\n", - "print(predictions)\n", - "\n", - "predictor.save('my_custom_heart_disease_predictor.pickle')" - ] - }, - { - "cell_type": "markdown", - "metadata": {}, - "source": [ - "That's it, all it takes to solve a predictive problem with lightwood using your own custom mixer." - ] - } - ], - "metadata": { - "interpreter": { - "hash": "31f2aee4e71d21fbe5cf8b01ff0e069b9275f58929596ceb00d14d90e3e16cd6" - }, - "kernelspec": { - "display_name": "Python 3", - "language": "python", - "name": "python3" - }, - "language_info": { - "codemirror_mode": { - "name": "ipython", - "version": 3 - }, - "file_extension": ".py", - "mimetype": "text/x-python", - "name": "python", - "nbconvert_exporter": "python", - "pygments_lexer": "ipython3", - "version": "3.8.10" - } - }, - "nbformat": 4, - "nbformat_minor": 4 -} diff --git a/docs/tutorials/custom_splitter/custom_splitter.html b/docs/tutorials/custom_splitter/custom_splitter.html deleted file mode 100644 index 43f745518..000000000 --- a/docs/tutorials/custom_splitter/custom_splitter.html +++ /dev/null @@ -1,1691 +0,0 @@ - - - - - - - - - - Build your own training/testing split — lightwood 1.6.1 documentation - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - -

Build your own training/testing split

-
-

Date: 2021.10.07

-

When working with machine learning data, splitting it into “train”, “dev” (or validation), and “test” sets is important. Models use train data to learn representations and update their parameters; dev or validation data is reserved to see how the model may perform on unseen data. While the model is not explicitly trained on it, it can be used as a stopping criterion, for hyper-parameter tuning, or as a simple sanity check. Lastly, test data is always reserved, hidden from the model, as a final pass to see which models perform best.
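For reference, a plain version of such a three-way split can be written with scikit-learn in a couple of lines. This sketch assumes an 80/10/10 ratio and a pandas DataFrame like the one loaded below, with none of the stratification or resampling discussed in this tutorial.

from sklearn.model_selection import train_test_split

# Hold out 10% of the rows as the hidden test set, then 10% of the original data as dev
train_dev, test = train_test_split(data, test_size=0.1, random_state=0)
train, dev = train_test_split(train_dev, test_size=1 / 9, random_state=0)
# train/dev/test now hold roughly 80% / 10% / 10% of the rows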

-

Lightwood supports a variety of encoders (Feature engineering procedures) and mixers (predictor algorithms that go from feature vectors to the target). Given the diversity of algorithms, it is appropriate to split data into these three categories when preparing encoders or fitting mixers.

-

Our default approach stratifies labeled data so that all classes are proportionally represented across your train, validation, and test sets. However, in many instances you may want a custom technique to build your own splits. We’ve included the splitter functionality (the default is found in lightwood.data.splitter) to enable you to build your own.

-

In the following problem, we will work with a Kaggle dataset on credit card fraud (found here). Fraud detection is difficult because the events we are interested in catching are, thankfully, rare. As a result, the classes are heavily imbalanced; in fact, in this dataset, less than 1% of the rows are the rare event.

-

In a supervised setting, we want to make sure our training data sees the rare event of interest; a random shuffle could easily miss it. We will use SMOTE to increase the number of positive-class examples in our training data.
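To see what SMOTE does in isolation, here is a tiny self-contained sketch on synthetic imbalanced data; the toy dataset and parameters are purely illustrative and not part of this tutorial’s pipeline.

from collections import Counter
from sklearn.datasets import make_classification
from imblearn.over_sampling import SMOTE

# A toy dataset where roughly 1% of the samples belong to the positive class
X, y = make_classification(n_samples=10000, weights=[0.99], random_state=0)
print(Counter(y))      # heavily imbalanced, e.g. roughly 9900 negatives vs 100 positives

# SMOTE synthesizes new minority-class points by interpolating between neighbours
X_res, y_res = SMOTE(random_state=0).fit_resample(X, y)
print(Counter(y_res))  # classes are now balanced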

-

Let’s get started!

-
-
[1]:
-
-
-
-import numpy as np
-import pandas as pd
-import torch
-import nltk
-import matplotlib.pyplot as plt
-
-import os
-import sys
-
-# Lightwood modules
-import lightwood as lw
-from lightwood import ProblemDefinition, \
-                      JsonAI, \
-                      json_ai_from_problem, \
-                      code_from_json_ai, \
-                      predictor_from_code
-
-import imblearn # Vers 0.5.0 minimum requirement
-
-
-
-
-

1) Load your data

-

Lightwood works with pandas DataFrames. We can use pandas to load our data. Please download the dataset from the above link and place it in a folder called data/ where this notebook is located.

-
-
[2]:
-
-
-
-# Load the data
-ddir = "data/"
-filename = os.path.join(ddir, "creditcard.csv.zip")
-
-data = pd.read_csv(filename)
-data.head()
-
-
-
-
-
[2]:
-
-
-
-
data.head() shows the first five rows (5 rows × 31 columns): the columns are Time, the PCA components V1–V28, Amount, and Class.

-
-
-

We see 31 columns, and most of them appear numerical. Due to confidentiality reasons, the Kaggle dataset mentions that the columns labeled \(V_i\) are principal components (PCs) from a PCA analysis of the original data from the credit card company. There are also “Time” and “Amount”, two original features that remained. “Time” is the time elapsed since the first transaction in the dataset, and “Amount” is how much money was involved in the transaction.

-

You can also see a heavy imbalance in the two classes below:

-
-
[3]:
-
-
-
-f = plt.figure()
-ax = f.add_subplot(1,1,1)
-ax.hist(data['Class'], bins = [-0.1, 0.1, 0.9, 1.1], log=True)
-ax.set_ylabel("Log Counts")
-ax.set_xticks([0, 1])
-ax.set_xticklabels(["0", "1"])
-ax.set_xlabel("Class")
-ax.set_title("Distribution of Classes")
-
-
-
-
-
[3]:
-
-
-
-
-Text(0.5, 1.0, 'Distribution of Classes')
-
-
-
-
-
-
-../../_images/tutorials_custom_splitter_custom_splitter_5_1.png -
-
-
-
-

2) Create a JSON-AI default object

-

We will now create JSON-AI syntax for our problem based on its specifications. We can do so by setting up a ProblemDefinition. The ProblemDefinition allows us to specify the target, the column we intend to predict, along with other details.

-

The end goal of JSON-AI is to provide a set of instructions on how to compile a machine learning pipeline.

-

Our target here is called “Class”, which indicates “0” for no fraud and “1” for fraud. We’ll generate the JSON-AI with the minimal syntax:

-
-
[4]:
-
-
-
-# Setup the problem definition
-problem_definition = {
-    'target': 'Class',
-}
-
-# Generate the j{ai}son syntax
-default_json = json_ai_from_problem(data, problem_definition)
-
-
-
-
-
-
-
-
-
-INFO:lightwood-51500:Dropping features: []
-INFO:lightwood-51500:Analyzing a sample of 18424
-INFO:lightwood-51500:from a total population of 284807, this is equivalent to 6.5% of your data.
-INFO:lightwood-51500:Using 15 processes to deduct types.
-INFO:lightwood-51500:Infering type for: Time
-INFO:lightwood-51500:Infering type for: V1
-INFO:lightwood-51500:Infering type for: V2
-INFO:lightwood-51500:Infering type for: V3
-INFO:lightwood-51500:Infering type for: V4
-INFO:lightwood-51500:Infering type for: V5
-INFO:lightwood-51500:Infering type for: V6
-INFO:lightwood-51500:Infering type for: V7
-INFO:lightwood-51500:Infering type for: V8
-INFO:lightwood-51500:Infering type for: V9
-INFO:lightwood-51500:Infering type for: V10
-INFO:lightwood-51500:Infering type for: V11
-INFO:lightwood-51500:Infering type for: V12
-INFO:lightwood-51500:Infering type for: V13
-INFO:lightwood-51500:Infering type for: V14
-INFO:lightwood-51500:Column Time has data type integer
-INFO:lightwood-51500:Infering type for: V15
-INFO:lightwood-51500:Column V4 has data type float
-INFO:lightwood-51500:Infering type for: V16
-INFO:lightwood-51500:Column V2 has data type float
-INFO:lightwood-51500:Infering type for: V17
-INFO:lightwood-51500:Column V3 has data type float
-INFO:lightwood-51500:Column V1 has data type float
-INFO:lightwood-51500:Infering type for: V18
-INFO:lightwood-51500:Infering type for: V19
-INFO:lightwood-51500:Column V6 has data type float
-INFO:lightwood-51500:Column V5 has data type float
-INFO:lightwood-51500:Infering type for: V20
-INFO:lightwood-51500:Column V7 has data type float
-INFO:lightwood-51500:Infering type for: V21
-INFO:lightwood-51500:Column V8 has data type float
-INFO:lightwood-51500:Infering type for: V22
-INFO:lightwood-51500:Infering type for: V23
-INFO:lightwood-51500:Column V9 has data type float
-INFO:lightwood-51500:Infering type for: V24
-INFO:lightwood-51500:Column V10 has data type float
-INFO:lightwood-51500:Column V13 has data type float
-INFO:lightwood-51500:Column V12 has data type float
-INFO:lightwood-51500:Infering type for: V25
-INFO:lightwood-51500:Column V11 has data type float
-INFO:lightwood-51500:Infering type for: V26
-INFO:lightwood-51500:Column V14 has data type float
-INFO:lightwood-51500:Infering type for: V28
-INFO:lightwood-51500:Infering type for: V27
-INFO:lightwood-51500:Infering type for: Amount
-INFO:lightwood-51500:Column V15 has data type float
-INFO:lightwood-51500:Infering type for: Class
-INFO:lightwood-51500:Column V16 has data type float
-INFO:lightwood-51500:Column V17 has data type float
-INFO:lightwood-51500:Column Class has data type binary
-INFO:lightwood-51500:Column Amount has data type float
-INFO:lightwood-51500:Column V23 has data type float
-INFO:lightwood-51500:Column V18 has data type float
-INFO:lightwood-51500:Column V19 has data type float
-INFO:lightwood-51500:Column V20 has data type float
-INFO:lightwood-51500:Column V28 has data type float
-INFO:lightwood-51500:Column V21 has data type float
-INFO:lightwood-51500:Column V22 has data type float
-INFO:lightwood-51500:Column V26 has data type float
-INFO:lightwood-51500:Column V24 has data type float
-INFO:lightwood-51500:Column V25 has data type float
-INFO:lightwood-51500:Column V27 has data type float
-INFO:lightwood-51500:Starting statistical analysis
-INFO:lightwood-51500:Finished statistical analysis
-
-
-
-
-
-
-
-MyCustomCleaner.py
-MyCustomCleaner
-MyCustomSplitter.py
-MyCustomSplitter
-
-
-

Lightwood looks at each of the many columns and indicates that they are mostly float, with the exception of “Class”, which is binary.

-

You can observe the JSON-AI if you run the command print(default_json.to_json()). Given there are many input features, we won’t print it out.

-

These are the only elements required to get off the ground with JSON-AI. However, we’re interested in making a custom approach. So, let’s write this syntax to a file and introduce our own changes.

-
-
[5]:
-
-
-
-with open("default.json", "w") as fp:
-   fp.write(default_json.to_json())
-
-
-
-
-
-

3) Build your own splitter module

-

For Lightwood, the goal of a splitter is to take an initial dataset (ideally pre-processed, although you can run the pre-processor on each DataFrame within the splitter) and return a dictionary with the keys “train”, “test”, and “dev” (at minimum). Subsequent steps of the pipeline expect these keys, so it’s important that you assign a DataFrame to each of them; a bare-bones example of the contract is sketched below.
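In its most minimal form, a splitter satisfying this contract only needs to return those three keys. The following sketch is purely illustrative: a simple shuffled row-count split with no stratification or resampling, unlike the SMOTE-based splitter we build in this tutorial.

from typing import Dict
import pandas as pd

def naive_splitter(data: pd.DataFrame, pct_train: float = 0.8, pct_dev: float = 0.1) -> Dict[str, pd.DataFrame]:
    # Shuffle, then cut the frame into contiguous train/dev/test chunks
    data = data.sample(frac=1, random_state=0).reset_index(drop=True)
    n_train = int(len(data) * pct_train)
    n_dev = int(len(data) * pct_dev)
    return {
        "train": data.iloc[:n_train],
        "dev": data.iloc[n_train:n_train + n_dev],
        "test": data.iloc[n_train + n_dev:],
    }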

-

We’re going to introduce SMOTE sampling in our splitter. SMOTE quickly learns an approximation of the minority class and synthesizes extra “samples” that mimic it.

-

We will use the package imblearn and scikit-learn to quickly create a train/test split and apply SMOTE to our training data only.

-

NOTE This is simply an example of things you can do with the splitter; whether SMOTE sampling is ideal for your problem depends on the question you’re trying to answer!

-
from lightwood.api.dtype import dtype
-import pandas as pd
-import numpy as np
-from typing import List, Dict
-from itertools import product
-from lightwood.api.types import TimeseriesSettings
-from lightwood.helpers.log import log
-
-
-from imblearn.over_sampling import SMOTE
-from sklearn.model_selection import train_test_split
-
-
-def MySplitter(
-    data: pd.DataFrame,
-    target: str,
-    pct_train: float = 0.8,
-    pct_dev: float = 0.1,
-    seed: int = 1,
-) -> Dict[str, pd.DataFrame]:
-    """
-    Custom splitting function
-
-
-    :param data: Input data
-    :param target: Name of the target
-    :param pct_train: Percentage of data reserved for training, taken out of full data
-    :param pct_dev: Percentage of data reserved for dev, taken out of train data
-    :param seed: Random seed for reproducibility
-
-    :returns: A dictionary containing the keys train, test and dev with their respective data frames.
-    """
-
-    # Shuffle the data
-    data = data.sample(frac=1, random_state=seed).reset_index(drop=True)
-
-    # Split into feature columns + target
-    X = data.iloc[:, data.columns != target]  # .values
-    y = data[target]  # .values
-
-    # Create a train/test split
-    X_train, X_test, y_train, y_test = train_test_split(
-        X, y, train_size=pct_train, random_state=seed, stratify=data[target]
-    )
-
-    X_train, X_dev, y_train, y_dev = train_test_split(
-        X_train, y_train, test_size=pct_dev, random_state=seed, stratify=y_train
-    )
-
-    # Create a SMOTE model and bump up underbalanced class JUST for train data
-    SMOTE_model = SMOTE(random_state=seed)
-
-    Xtrain_mod, ytrain_mod = SMOTE_model.fit_resample(X_train, y_train.ravel())
-
-    Xtrain_mod[target] = ytrain_mod
-    X_test[target] = y_test
-    X_dev[target] = y_dev
-
-    return {"train": Xtrain_mod, "test": X_test, "dev": X_dev}
-
-
-
-
-
-

Place your custom module in ~/lightwood_modules

-

Lightwood automatically searches for custom scripts in your ~/lightwood_modules path, so place your file there. Later, when we autogenerate code, you’ll see that you can change the import location if you choose.
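As a convenience, you can also drop the file in place programmatically. In this sketch, splitter_code is a hypothetical string holding the MySplitter source shown above; the file name must match the module name used in the JSON-AI below.

from pathlib import Path

modules_dir = Path.home() / "lightwood_modules"
modules_dir.mkdir(parents=True, exist_ok=True)

# Write the custom splitter module where Lightwood will discover it
(modules_dir / "MyCustomSplitter.py").write_text(splitter_code)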

-
-

4) Introduce your custom splitter in JSON-AI

-

Now let’s introduce our custom splitter. JSON-AI keeps a lightweight syntax but fills in many default modules (like splitting, cleaning).

-

For the custom splitter, we’ll work by editing the “splitter” key. We will change two properties within it: (1) “module” - the name of the module and function; in our case “MyCustomSplitter.MySplitter”. (2) “args” - any keyword arguments specific to your splitter’s internals.

-

This will look as follows:

-
"splitter": {
-    "module": "MyCustomSplitter.MySplitter",
-    "args": {
-        "data": "data",
-        "target": "$target",
-        "pct_train": 0.8,
-        "pct_dev": 0.1,
-        "seed": 1
-    }
-},
-
-
-

Let’s copy our file default.json into custom.json and add this block. Then, we can proceed as usual to create python code.
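If you prefer not to edit the file by hand, the same change can be scripted with Python’s json module. This sketch assumes the default.json written earlier and the “splitter” block shown above; whether the serialized JSON-AI exposes a top-level “splitter” key may vary by Lightwood version.

import json

with open("default.json", "r") as fp:
    cfg = json.load(fp)

# Swap the default splitter for our custom one
cfg["splitter"] = {
    "module": "MyCustomSplitter.MySplitter",
    "args": {"data": "data", "target": "$target", "pct_train": 0.8, "pct_dev": 0.1, "seed": 1},
}

with open("custom.json", "w") as fp:
    json.dump(cfg, fp, indent=2)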

-
-
-

5) Generate Python code representing your ML pipeline

-

Now we’re ready to load up our custom JSON-AI and generate the predictor code!

-

We can do this by first reading in our custom json-syntax, and then calling the function code_from_json_ai.

-
-
[6]:
-
-
-
-# Make changes to your JSON-file and load the custom version
-with open('custom.json', 'r') as fp:
-   modified_json = JsonAI.from_json(fp.read())
-
-#Generate python code that fills in your pipeline
-code = code_from_json_ai(modified_json)
-
-print(code)
-
-# Save code to a file (Optional)
-with open('custom_splitter_pipeline.py', 'w') as fp:
-    fp.write(code)
-
-
-
-
-
-
-
-
-MyCustomCleaner.py
-MyCustomCleaner
-MyCustomSplitter.py
-MyCustomSplitter
-import lightwood
-from lightwood.analysis import *
-from lightwood.api import *
-from lightwood.data import *
-from lightwood.encoder import *
-from lightwood.ensemble import *
-from lightwood.helpers.device import *
-from lightwood.helpers.general import *
-from lightwood.helpers.log import *
-from lightwood.helpers.numeric import *
-from lightwood.helpers.parallelism import *
-from lightwood.helpers.seed import *
-from lightwood.helpers.text import *
-from lightwood.helpers.torch import *
-from lightwood.mixer import *
-import pandas as pd
-from typing import Dict, List
-import os
-from types import ModuleType
-import importlib.machinery
-import sys
-
-
-for import_dir in [os.path.expanduser("~/lightwood_modules"), "/etc/lightwood_modules"]:
-    if os.path.exists(import_dir) and os.access(import_dir, os.R_OK):
-        for file_name in list(os.walk(import_dir))[0][2]:
-            print(file_name)
-            if file_name[-3:] != ".py":
-                continue
-            mod_name = file_name[:-3]
-            print(mod_name)
-            loader = importlib.machinery.SourceFileLoader(
-                mod_name, os.path.join(import_dir, file_name)
-            )
-            module = ModuleType(loader.name)
-            loader.exec_module(module)
-            sys.modules[mod_name] = module
-            exec(f"import {mod_name}")
-
-
-class Predictor(PredictorInterface):
-    target: str
-    mixers: List[BaseMixer]
-    encoders: Dict[str, BaseEncoder]
-    ensemble: BaseEnsemble
-    mode: str
-
-    def __init__(self):
-        seed(420)
-        self.target = "Class"
-        self.mode = "inactive"
-        self.problem_definition = ProblemDefinition.from_dict(
-            {
-                "target": "Class",
-                "pct_invalid": 2,
-                "unbias_target": True,
-                "seconds_per_mixer": 14354,
-                "seconds_per_encoder": 0,
-                "time_aim": 64593.50573948541,
-                "target_weights": None,
-                "positive_domain": False,
-                "timeseries_settings": {
-                    "is_timeseries": False,
-                    "order_by": None,
-                    "window": None,
-                    "group_by": None,
-                    "use_previous_target": True,
-                    "nr_predictions": None,
-                    "historical_columns": None,
-                    "target_type": "",
-                    "allow_incomplete_history": False,
-                },
-                "anomaly_detection": True,
-                "ignore_features": [],
-                "fit_on_all": True,
-                "strict_mode": True,
-                "seed_nr": 420,
-            }
-        )
-        self.accuracy_functions = ["balanced_accuracy_score"]
-        self.identifiers = {}
-        self.dtype_dict = {
-            "Class": "binary",
-            "Time": "integer",
-            "V1": "float",
-            "V2": "float",
-            "V3": "float",
-            "V4": "float",
-            "V5": "float",
-            "V6": "float",
-            "V7": "float",
-            "V8": "float",
-            "V9": "float",
-            "V10": "float",
-            "V11": "float",
-            "V12": "float",
-            "V13": "float",
-            "V14": "float",
-            "V15": "float",
-            "V16": "float",
-            "V17": "float",
-            "V18": "float",
-            "V19": "float",
-            "V20": "float",
-            "V21": "float",
-            "V22": "float",
-            "V23": "float",
-            "V24": "float",
-            "V25": "float",
-            "V26": "float",
-            "V27": "float",
-            "V28": "float",
-            "Amount": "float",
-        }
-
-        # Any feature-column dependencies
-        self.dependencies = {
-            "Time": [],
-            "V1": [],
-            "V2": [],
-            "V3": [],
-            "V4": [],
-            "V5": [],
-            "V6": [],
-            "V7": [],
-            "V8": [],
-            "V9": [],
-            "V10": [],
-            "V11": [],
-            "V12": [],
-            "V13": [],
-            "V14": [],
-            "V15": [],
-            "V16": [],
-            "V17": [],
-            "V18": [],
-            "V19": [],
-            "V20": [],
-            "V21": [],
-            "V22": [],
-            "V23": [],
-            "V24": [],
-            "V25": [],
-            "V26": [],
-            "V27": [],
-            "V28": [],
-            "Amount": [],
-        }
-
-        self.input_cols = [
-            "Time",
-            "V1",
-            "V2",
-            "V3",
-            "V4",
-            "V5",
-            "V6",
-            "V7",
-            "V8",
-            "V9",
-            "V10",
-            "V11",
-            "V12",
-            "V13",
-            "V14",
-            "V15",
-            "V16",
-            "V17",
-            "V18",
-            "V19",
-            "V20",
-            "V21",
-            "V22",
-            "V23",
-            "V24",
-            "V25",
-            "V26",
-            "V27",
-            "V28",
-            "Amount",
-        ]
-
-        # Initial stats analysis
-        self.statistical_analysis = None
-
-    def analyze_data(self, data: pd.DataFrame) -> None:
-        # Perform a statistical analysis on the unprocessed data
-
-        log.info("Performing statistical analysis on data")
-        self.statistical_analysis = lightwood.data.statistical_analysis(
-            data, self.dtype_dict, {}, self.problem_definition
-        )
-
-        # Instantiate post-training evaluation
-        self.analysis_blocks = [
-            ICP(
-                fixed_significance=None,
-                confidence_normalizer=False,
-                positive_domain=self.statistical_analysis.positive_domain,
-            ),
-            AccStats(deps=["ICP"]),
-            GlobalFeatureImportance(disable_column_importance=False),
-        ]
-
-    def preprocess(self, data: pd.DataFrame) -> pd.DataFrame:
-        # Preprocess and clean data
-
-        log.info("Cleaning the data")
-        data = cleaner(
-            data=data,
-            pct_invalid=self.problem_definition.pct_invalid,
-            identifiers=self.identifiers,
-            dtype_dict=self.dtype_dict,
-            target=self.target,
-            mode=self.mode,
-            timeseries_settings=self.problem_definition.timeseries_settings,
-            anomaly_detection=self.problem_definition.anomaly_detection,
-        )
-
-        # Time-series blocks
-
-        return data
-
-    def split(self, data: pd.DataFrame) -> Dict[str, pd.DataFrame]:
-        # Split the data into training/testing splits
-
-        log.info("Splitting the data into train/test")
-        train_test_data = MyCustomSplitter.MySplitter(
-            data=data, pct_train=0.8, pct_dev=0.1, seed=1, target=self.target
-        )
-
-        return train_test_data
-
-    def prepare(self, data: Dict[str, pd.DataFrame]) -> None:
-        # Prepare encoders to featurize data
-
-        self.mode = "train"
-
-        if self.statistical_analysis is None:
-            raise Exception("Please run analyze_data first")
-
-        # Column to encoder mapping
-        self.encoders = {
-            "Class": Binary.BinaryEncoder(
-                is_target=True,
-                target_class_distribution=self.statistical_analysis.target_class_distribution,
-            ),
-            "Time": Integer.NumericEncoder(),
-            "V1": Float.NumericEncoder(),
-            "V2": Float.NumericEncoder(),
-            "V3": Float.NumericEncoder(),
-            "V4": Float.NumericEncoder(),
-            "V5": Float.NumericEncoder(),
-            "V6": Float.NumericEncoder(),
-            "V7": Float.NumericEncoder(),
-            "V8": Float.NumericEncoder(),
-            "V9": Float.NumericEncoder(),
-            "V10": Float.NumericEncoder(),
-            "V11": Float.NumericEncoder(),
-            "V12": Float.NumericEncoder(),
-            "V13": Float.NumericEncoder(),
-            "V14": Float.NumericEncoder(),
-            "V15": Float.NumericEncoder(),
-            "V16": Float.NumericEncoder(),
-            "V17": Float.NumericEncoder(),
-            "V18": Float.NumericEncoder(),
-            "V19": Float.NumericEncoder(),
-            "V20": Float.NumericEncoder(),
-            "V21": Float.NumericEncoder(),
-            "V22": Float.NumericEncoder(),
-            "V23": Float.NumericEncoder(),
-            "V24": Float.NumericEncoder(),
-            "V25": Float.NumericEncoder(),
-            "V26": Float.NumericEncoder(),
-            "V27": Float.NumericEncoder(),
-            "V28": Float.NumericEncoder(),
-            "Amount": Float.NumericEncoder(),
-        }
-
-        # Prepare the training + dev data
-        concatenated_train_dev = pd.concat([data["train"], data["dev"]])
-
-        log.info("Preparing the encoders")
-
-        encoder_prepping_dict = {}
-
-        # Prepare encoders that do not require learned strategies
-        for col_name, encoder in self.encoders.items():
-            if not encoder.is_trainable_encoder:
-                encoder_prepping_dict[col_name] = [
-                    encoder,
-                    concatenated_train_dev[col_name],
-                    "prepare",
-                ]
-                log.info(
-                    f"Encoder prepping dict length of: {len(encoder_prepping_dict)}"
-                )
-
-        # Setup parallelization
-        parallel_prepped_encoders = mut_method_call(encoder_prepping_dict)
-        for col_name, encoder in parallel_prepped_encoders.items():
-            self.encoders[col_name] = encoder
-
-        # Prepare the target
-        if self.target not in parallel_prepped_encoders:
-            if self.encoders[self.target].is_trainable_encoder:
-                self.encoders[self.target].prepare(
-                    data["train"][self.target], data["dev"][self.target]
-                )
-            else:
-                self.encoders[self.target].prepare(
-                    pd.concat([data["train"], data["dev"]])[self.target]
-                )
-
-        # Prepare any non-target encoders that are learned
-        for col_name, encoder in self.encoders.items():
-            if encoder.is_trainable_encoder:
-                priming_data = pd.concat([data["train"], data["dev"]])
-                kwargs = {}
-                if self.dependencies[col_name]:
-                    kwargs["dependency_data"] = {}
-                    for col in self.dependencies[col_name]:
-                        kwargs["dependency_data"][col] = {
-                            "original_type": self.dtype_dict[col],
-                            "data": priming_data[col],
-                        }
-
-                # If an encoder representation requires the target, provide priming data
-                if hasattr(encoder, "uses_target"):
-                    kwargs["encoded_target_values"] = parallel_prepped_encoders[
-                        self.target
-                    ].encode(priming_data[self.target])
-
-                encoder.prepare(
-                    data["train"][col_name], data["dev"][col_name], **kwargs
-                )
-
-    def featurize(self, split_data: Dict[str, pd.DataFrame]):
-        # Featurize data into numerical representations for models
-
-        log.info("Featurizing the data")
-        feature_data = {key: None for key in split_data.keys()}
-
-        for key, data in split_data.items():
-            feature_data[key] = EncodedDs(self.encoders, data, self.target)
-
-        return feature_data
-
-    def fit(self, enc_data: Dict[str, pd.DataFrame]) -> None:
-        # Fit predictors to estimate target
-
-        self.mode = "train"
-
-        # --------------- #
-        # Extract data
-        # --------------- #
-        # Extract the featurized data into train/dev/test
-        encoded_train_data = enc_data["train"]
-        encoded_dev_data = enc_data["dev"]
-        encoded_test_data = enc_data["test"]
-
-        log.info("Training the mixers")
-
-        # --------------- #
-        # Fit Models
-        # --------------- #
-        # Assign list of mixers
-        self.mixers = [
-            Neural(
-                fit_on_dev=True,
-                search_hyperparameters=True,
-                net="DefaultNet",
-                stop_after=self.problem_definition.seconds_per_mixer,
-                target_encoder=self.encoders[self.target],
-                target=self.target,
-                dtype_dict=self.dtype_dict,
-                input_cols=self.input_cols,
-                timeseries_settings=self.problem_definition.timeseries_settings,
-            ),
-            LightGBM(
-                fit_on_dev=True,
-                stop_after=self.problem_definition.seconds_per_mixer,
-                target=self.target,
-                dtype_dict=self.dtype_dict,
-                input_cols=self.input_cols,
-            ),
-            Regression(
-                stop_after=self.problem_definition.seconds_per_mixer,
-                target=self.target,
-                dtype_dict=self.dtype_dict,
-                target_encoder=self.encoders[self.target],
-            ),
-        ]
-
-        # Train mixers
-        trained_mixers = []
-        for mixer in self.mixers:
-            try:
-                mixer.fit(encoded_train_data, encoded_dev_data)
-                trained_mixers.append(mixer)
-            except Exception as e:
-                log.warning(f"Exception: {e} when training mixer: {mixer}")
-                if True and mixer.stable:
-                    raise e
-
-        # Update mixers to trained versions
-        self.mixers = trained_mixers
-
-        # --------------- #
-        # Create Ensembles
-        # --------------- #
-        log.info("Ensembling the mixer")
-        # Create an ensemble of mixers to identify best performing model
-        self.pred_args = PredictionArguments()
-        self.ensemble = BestOf(
-            ts_analysis=None,
-            data=encoded_test_data,
-            accuracy_functions=self.accuracy_functions,
-            target=self.target,
-            mixers=self.mixers,
-        )
-        self.supports_proba = self.ensemble.supports_proba
-
-    def analyze_ensemble(self, enc_data: Dict[str, pd.DataFrame]) -> None:
-        # Evaluate quality of fit for the ensemble of mixers
-
-        # --------------- #
-        # Extract data
-        # --------------- #
-        # Extract the featurized data into train/dev/test
-        encoded_train_data = enc_data["train"]
-        encoded_dev_data = enc_data["dev"]
-        encoded_test_data = enc_data["test"]
-
-        # --------------- #
-        # Analyze Ensembles
-        # --------------- #
-        log.info("Analyzing the ensemble of mixers")
-        self.model_analysis, self.runtime_analyzer = model_analyzer(
-            data=encoded_test_data,
-            train_data=encoded_train_data,
-            stats_info=self.statistical_analysis,
-            ts_cfg=self.problem_definition.timeseries_settings,
-            accuracy_functions=self.accuracy_functions,
-            predictor=self.ensemble,
-            target=self.target,
-            dtype_dict=self.dtype_dict,
-            analysis_blocks=self.analysis_blocks,
-        )
-
-    def learn(self, data: pd.DataFrame) -> None:
-        log.info(f"Dropping features: {self.problem_definition.ignore_features}")
-        data = data.drop(
-            columns=self.problem_definition.ignore_features, errors="ignore"
-        )
-
-        self.mode = "train"
-
-        # Perform stats analysis
-        self.analyze_data(data)
-
-        # Pre-process the data
-        clean_data = self.preprocess(data)
-
-        # Create train/test (dev) split
-        train_dev_test = self.split(clean_data)
-
-        # Prepare encoders
-        self.prepare(train_dev_test)
-
-        # Create feature vectors from data
-        enc_train_test = self.featurize(train_dev_test)
-
-        # Prepare mixers
-        self.fit(enc_train_test)
-
-        # Analyze the ensemble
-        self.analyze_ensemble(enc_train_test)
-
-        # ------------------------ #
-        # Enable model partial fit AFTER it is trained and evaluated for performance with the appropriate train/dev/test splits.
-        # This assumes the predictor could continuously evolve, hence including reserved testing data may improve predictions.
-        # SET `json_ai.problem_definition.fit_on_all=False` TO TURN THIS BLOCK OFF.
-
-        # Update the mixers with partial fit
-        if self.problem_definition.fit_on_all:
-
-            log.info("Adjustment on validation requested.")
-            update_data = {
-                "new": enc_train_test["test"],
-                "old": ConcatedEncodedDs(
-                    [enc_train_test["train"], enc_train_test["dev"]]
-                ),
-            }  # noqa
-
-            self.adjust(update_data)
-
-    def adjust(self, new_data: Dict[str, pd.DataFrame]) -> None:
-        # Update mixers with new information
-
-        self.mode = "train"
-
-        # --------------- #
-        # Extract data
-        # --------------- #
-        # Extract the featurized data
-        encoded_old_data = new_data["old"]
-        encoded_new_data = new_data["new"]
-
-        # --------------- #
-        # Adjust (Update) Mixers
-        # --------------- #
-        log.info("Updating the mixers")
-
-        for mixer in self.mixers:
-            mixer.partial_fit(encoded_new_data, encoded_old_data)
-
-    def predict(self, data: pd.DataFrame, args: Dict = {}) -> pd.DataFrame:
-
-        # Remove columns that user specifies to ignore
-        log.info(f"Dropping features: {self.problem_definition.ignore_features}")
-        data = data.drop(
-            columns=self.problem_definition.ignore_features, errors="ignore"
-        )
-        for col in self.input_cols:
-            if col not in data.columns:
-                data[col] = [None] * len(data)
-
-        # Clean the data
-        self.mode = "predict"
-        log.info("Cleaning the data")
-        data = cleaner(
-            data=data,
-            pct_invalid=self.problem_definition.pct_invalid,
-            identifiers=self.identifiers,
-            dtype_dict=self.dtype_dict,
-            target=self.target,
-            mode=self.mode,
-            timeseries_settings=self.problem_definition.timeseries_settings,
-            anomaly_detection=self.problem_definition.anomaly_detection,
-        )
-
-        # Featurize the data
-        encoded_ds = EncodedDs(self.encoders, data, self.target)
-        encoded_data = encoded_ds.get_encoded_data(include_target=False)
-
-        self.pred_args = PredictionArguments.from_dict(args)
-        df = self.ensemble(encoded_ds, args=self.pred_args)
-
-        if self.pred_args.all_mixers:
-            return df
-        else:
-            insights, global_insights = explain(
-                data=data,
-                encoded_data=encoded_data,
-                predictions=df,
-                ts_analysis=None,
-                timeseries_settings=self.problem_definition.timeseries_settings,
-                positive_domain=self.statistical_analysis.positive_domain,
-                anomaly_detection=self.problem_definition.anomaly_detection,
-                analysis=self.runtime_analyzer,
-                target_name=self.target,
-                target_dtype=self.dtype_dict[self.target],
-                explainer_blocks=self.analysis_blocks,
-                fixed_confidence=self.pred_args.fixed_confidence,
-                anomaly_error_rate=self.pred_args.anomaly_error_rate,
-                anomaly_cooldown=self.pred_args.anomaly_cooldown,
-            )
-            return insights
-
-
-
-

As you can see, an end-to-end pipeline for our entire ML procedure has been generated. The pipeline is split into several abstracted functions, which makes it transparent exactly which processes your data goes through in order to build these models.

-

The key steps of the pipeline are as follows:

-
  1. Run a statistical analysis with analyze_data

  2. Clean your data with preprocess

  3. Make a training/dev/testing split with split

  4. Prepare your feature-engineering pipelines with prepare

  5. Create your features with featurize

  6. Fit your predictor models with fit

You can customize this further if you wish, but these steps are all you need to train a model!

-

We recommend familiarizing yourself with these steps by calling the above commands, ideally in order. Some commands (namely prepare, featurize, and fit) depend on the outputs of earlier steps.

-

If you want to skip the individual steps, we recommend simply calling the learn method, which runs all of the necessary steps for you and returns a fully trained predictive model starting from unprocessed data!
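
To make the order concrete, here is a minimal sketch of both routes. It assumes the predictor object from step 6 below has already been created and that data is the pandas DataFrame we loaded earlier; nothing here goes beyond the methods of the generated pipeline shown above.

```
# Sketch: run the generated pipeline step by step
# (assumes `predictor` from step 6 and the `data` DataFrame are in scope)
predictor.analyze_data(data)                # 1. statistical analysis
cleaned_data = predictor.preprocess(data)   # 2. clean the data
splits = predictor.split(cleaned_data)      # 3. train/dev/test split (our custom splitter)
predictor.prepare(splits)                   # 4. prepare the encoders
encoded_data = predictor.featurize(splits)  # 5. build the feature representations
predictor.fit(encoded_data)                 # 6. train the mixers and build the ensemble

# Or simply let `learn` run every step above (plus the ensemble analysis) in one call
predictor.learn(data)
```

Either route leaves you with a trained ensemble; the step-by-step version is handy when you want to inspect the intermediate outputs, as we do below.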

-
-
-

6) Call Python to run your code and see your preprocessed outputs

-

Once we have the code, we can turn it into a Python object by calling predictor_from_code. This instantiates the PredictorInterface object.

-

This predictor object can then be used to run your pipeline.

-
-
[7]:
-
-
-
-# Turn the code above into a predictor object
-predictor = predictor_from_code(code)
-
-
-
-
-
-
-
-
-MyCustomCleaner.py
-MyCustomCleaner
-MyCustomSplitter.py
-MyCustomSplitter
-
-
-
-
[8]:
-
-
-
-# Pre-process the data
-cleaned_data = predictor.preprocess(data)
-train_test_data = predictor.split(cleaned_data)
-
-
-
-
-
-
-
-
-INFO:lightwood-51500:Cleaning the data
-INFO:lightwood-51500:Splitting the data into train/test
-/home/natasha/lightwood_modules/MyCustomSplitter.py:56: SettingWithCopyWarning:
-A value is trying to be set on a copy of a slice from a DataFrame.
-Try using .loc[row_indexer,col_indexer] = value instead
-
-See the caveats in the documentation: https://pandas.pydata.org/pandas-docs/stable/user_guide/indexing.html#returning-a-view-versus-a-copy
-  X_test[target] = y_test
-/home/natasha/lightwood_modules/MyCustomSplitter.py:57: SettingWithCopyWarning:
-A value is trying to be set on a copy of a slice from a DataFrame.
-Try using .loc[row_indexer,col_indexer] = value instead
-
-See the caveats in the documentation: https://pandas.pydata.org/pandas-docs/stable/user_guide/indexing.html#returning-a-view-versus-a-copy
-  X_dev[target] = y_dev
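
As an aside, the SettingWithCopyWarning above is raised because MyCustomSplitter assigns the target column back onto slices returned by train_test_split. The tutorial leaves this as-is, but a minimal way to silence it, if you prefer, is to copy the slices before assigning (a suggested tweak, not part of the original splitter):

```
# Inside MySplitter, copy the slices before adding the target column back
X_test = X_test.copy()
X_dev = X_dev.copy()
X_test[target] = y_test
X_dev[target] = y_dev
```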
-
-
-
-
[9]:
-
-
-
-plt.rcParams['font.size']=15
-f = plt.figure(figsize=(18, 5))
-
-ax = f.add_subplot(1,3,1)
-ax.hist(train_test_data["train"]['Class'], bins = [-0.1, 0.1, 0.9, 1.1], log=True)
-ax.set_ylabel("Log Counts")
-ax.set_xticks([0, 1])
-ax.set_xticklabels(["0", "1"])
-ax.set_xlabel("Class")
-ax.set_title("Train:\nDistribution of Classes")
-ax.set_ylim([1, 1e6])
-
-ax = f.add_subplot(1,3,2)
-ax.hist(train_test_data["dev"]['Class'], bins = [-0.1, 0.1, 0.9, 1.1], log=True, color='k')
-ax.set_ylabel("Log Counts")
-ax.set_xticks([0, 1])
-ax.set_xticklabels(["0", "1"])
-ax.set_xlabel("Class")
-ax.set_title("Dev:\nDistribution of Classes")
-ax.set_ylim([1, 1e6])
-
-
-ax = f.add_subplot(1,3,3)
-ax.hist(train_test_data["test"]['Class'], bins = [-0.1, 0.1, 0.9, 1.1], log=True, color='r')
-ax.set_ylabel("Log Counts")
-ax.set_xticks([0, 1])
-ax.set_xticklabels(["0", "1"])
-ax.set_xlabel("Class")
-ax.set_title("Test:\nDistribution of Classes")
-ax.set_ylim([1, 1e6])
-
-f.tight_layout()
-
-
-
-
-
-
-
-[Image: ../../_images/tutorials_custom_splitter_custom_splitter_21_0.png (log-scale class distribution histograms for the train, dev, and test splits)]
-
-

As you can see, our splitter has greatly increased the representation of the minority class within the training data, but not so for the testing or dev data.
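
If you want to confirm this numerically rather than visually, a quick check like the following sketch (using the train_test_data dictionary from step [8] above) prints the class counts per split; after SMOTE the train split should be roughly balanced, while dev and test keep the original imbalance.

```
# Print the raw class counts for each split
for split_name in ["train", "dev", "test"]:
    counts = train_test_data[split_name]["Class"].value_counts()
    print(split_name, counts.to_dict())
```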

-

We hope this tutorial has shown you how to introduce a custom splitter method for your datasets! For more customization tutorials, please check our documentation.

-

If you want to download the Jupyter notebook version of this tutorial, check out the source GitHub location found here: lightwood/docssrc/source/tutorials/custom_splitter.

-
-
-
- - - - - - - - - - - \ No newline at end of file diff --git a/docs/tutorials/custom_splitter/custom_splitter.ipynb b/docs/tutorials/custom_splitter/custom_splitter.ipynb deleted file mode 100644 index 174ed0695..000000000 --- a/docs/tutorials/custom_splitter/custom_splitter.ipynb +++ /dev/null @@ -1,1387 +0,0 @@ -{ - "cells": [ - { - "cell_type": "markdown", - "id": "israeli-spyware", - "metadata": {}, - "source": [ - "## Build your own training/testing split\n", - "\n", - "#### Date: 2021.10.07\n", - "\n", - "When working with machine learning data, splitting into a \"train\", \"dev\" (or validation) and \"test\") set is important. Models use **train** data to learn representations and update their parameters; **dev** or validation data is reserved to see how the model may perform on unknown predictions. While it may not be explicitly trained on, it can be used as a stopping criteria, for hyper-parameter tuning, or as a simple sanity check. Lastly, **test** data is always reserved, hidden from the model, as a final pass to see what models perform best.\n", - "\n", - "Lightwood supports a variety of **encoders** (Feature engineering procedures) and **mixers** (predictor algorithms that go from feature vectors to the target). Given the diversity of algorithms, it is appropriate to split data into these three categories when *preparing* encoders or *fitting* mixers.\n", - "\n", - "Our default approach stratifies labeled data to ensure your train, validation, and test sets are equally represented in all classes. However, in many instances you may want a custom technique to build your own splits. We've included the `splitter` functionality (default found in `lightwood.data.splitter`) to enable you to build your own.\n", - "\n", - "In the following problem, we shall work with a Kaggle dataset around credit card fraud (found [here](https://www.kaggle.com/mlg-ulb/creditcardfraud)). Fraud detection is difficult because the events we are interested in catching are thankfully rare events. Because of that, there is a large **imbalance of classes** (in fact, in this dataset, less than 1% of the data are the rare-event).\n", - "\n", - "In a supervised technique, we may want to ensure our training data sees the rare event of interest. A random shuffle could potentially miss rare events. We will implement **SMOTE** to increase the number of positive classes in our training data.\n", - "\n", - "Let's get started!" - ] - }, - { - "cell_type": "code", - "execution_count": 1, - "id": "interim-discussion", - "metadata": {}, - "outputs": [], - "source": [ - "import numpy as np\n", - "import pandas as pd\n", - "import torch\n", - "import nltk\n", - "import matplotlib.pyplot as plt\n", - "\n", - "import os\n", - "import sys\n", - "\n", - "# Lightwood modules\n", - "import lightwood as lw\n", - "from lightwood import ProblemDefinition, \\\n", - " JsonAI, \\\n", - " json_ai_from_problem, \\\n", - " code_from_json_ai, \\\n", - " predictor_from_code\n", - "\n", - "import imblearn # Vers 0.5.0 minimum requirement" - ] - }, - { - "cell_type": "markdown", - "id": "decimal-techno", - "metadata": {}, - "source": [ - "### 1) Load your data\n", - "\n", - "Lightwood works with `pandas` DataFrames. We can use pandas to load our data. Please download the dataset from the above link and place it in a folder called `data/` where this notebook is located." - ] - }, - { - "cell_type": "code", - "execution_count": 2, - "id": "foreign-orchestra", - "metadata": {}, - "outputs": [ - { - "data": { - "text/html": [ - "
\n", - "\n", - "\n", - " \n", - " \n", - " \n", - " \n", - " \n", - " \n", - " \n", - " \n", - " \n", - " \n", - " \n", - " \n", - " \n", - " \n", - " \n", - " \n", - " \n", - " \n", - " \n", - " \n", - " \n", - " \n", - " \n", - " \n", - " \n", - " \n", - " \n", - " \n", - " \n", - " \n", - " \n", - " \n", - " \n", - " \n", - " \n", - " \n", - " \n", - " \n", - " \n", - " \n", - " \n", - " \n", - " \n", - " \n", - " \n", - " \n", - " \n", - " \n", - " \n", - " \n", - " \n", - " \n", - " \n", - " \n", - " \n", - " \n", - " \n", - " \n", - " \n", - " \n", - " \n", - " \n", - " \n", - " \n", - " \n", - " \n", - " \n", - " \n", - " \n", - " \n", - " \n", - " \n", - " \n", - " \n", - " \n", - " \n", - " \n", - " \n", - " \n", - " \n", - " \n", - " \n", - " \n", - " \n", - " \n", - " \n", - " \n", - " \n", - " \n", - " \n", - " \n", - " \n", - " \n", - " \n", - " \n", - " \n", - " \n", - " \n", - " \n", - " \n", - " \n", - " \n", - " \n", - " \n", - " \n", - " \n", - " \n", - " \n", - " \n", - " \n", - " \n", - " \n", - " \n", - " \n", - " \n", - " \n", - " \n", - " \n", - " \n", - " \n", - " \n", - " \n", - " \n", - " \n", - " \n", - " \n", - " \n", - " \n", - " \n", - " \n", - " \n", - " \n", - " \n", - " \n", - " \n", - " \n", - " \n", - " \n", - " \n", - " \n", - " \n", - " \n", - " \n", - " \n", - " \n", - " \n", - " \n", - " \n", - "
TimeV1V2V3V4V5V6V7V8V9...V21V22V23V24V25V26V27V28AmountClass
00.0-1.359807-0.0727812.5363471.378155-0.3383210.4623880.2395990.0986980.363787...-0.0183070.277838-0.1104740.0669280.128539-0.1891150.133558-0.021053149.620
10.01.1918570.2661510.1664800.4481540.060018-0.082361-0.0788030.085102-0.255425...-0.225775-0.6386720.101288-0.3398460.1671700.125895-0.0089830.0147242.690
21.0-1.358354-1.3401631.7732090.379780-0.5031981.8004990.7914610.247676-1.514654...0.2479980.7716790.909412-0.689281-0.327642-0.139097-0.055353-0.059752378.660
31.0-0.966272-0.1852261.792993-0.863291-0.0103091.2472030.2376090.377436-1.387024...-0.1083000.005274-0.190321-1.1755750.647376-0.2219290.0627230.061458123.500
42.0-1.1582330.8777371.5487180.403034-0.4071930.0959210.592941-0.2705330.817739...-0.0094310.798278-0.1374580.141267-0.2060100.5022920.2194220.21515369.990
\n", - "

5 rows × 31 columns

\n", - "
" - ], - "text/plain": [ - " Time V1 V2 V3 V4 V5 V6 V7 \\\n", - "0 0.0 -1.359807 -0.072781 2.536347 1.378155 -0.338321 0.462388 0.239599 \n", - "1 0.0 1.191857 0.266151 0.166480 0.448154 0.060018 -0.082361 -0.078803 \n", - "2 1.0 -1.358354 -1.340163 1.773209 0.379780 -0.503198 1.800499 0.791461 \n", - "3 1.0 -0.966272 -0.185226 1.792993 -0.863291 -0.010309 1.247203 0.237609 \n", - "4 2.0 -1.158233 0.877737 1.548718 0.403034 -0.407193 0.095921 0.592941 \n", - "\n", - " V8 V9 ... V21 V22 V23 V24 V25 \\\n", - "0 0.098698 0.363787 ... -0.018307 0.277838 -0.110474 0.066928 0.128539 \n", - "1 0.085102 -0.255425 ... -0.225775 -0.638672 0.101288 -0.339846 0.167170 \n", - "2 0.247676 -1.514654 ... 0.247998 0.771679 0.909412 -0.689281 -0.327642 \n", - "3 0.377436 -1.387024 ... -0.108300 0.005274 -0.190321 -1.175575 0.647376 \n", - "4 -0.270533 0.817739 ... -0.009431 0.798278 -0.137458 0.141267 -0.206010 \n", - "\n", - " V26 V27 V28 Amount Class \n", - "0 -0.189115 0.133558 -0.021053 149.62 0 \n", - "1 0.125895 -0.008983 0.014724 2.69 0 \n", - "2 -0.139097 -0.055353 -0.059752 378.66 0 \n", - "3 -0.221929 0.062723 0.061458 123.50 0 \n", - "4 0.502292 0.219422 0.215153 69.99 0 \n", - "\n", - "[5 rows x 31 columns]" - ] - }, - "execution_count": 2, - "metadata": {}, - "output_type": "execute_result" - } - ], - "source": [ - "# Load the data\n", - "ddir = \"data/\"\n", - "filename = os.path.join(ddir, \"creditcard.csv.zip\")\n", - "\n", - "data = pd.read_csv(filename)\n", - "data.head()" - ] - }, - { - "cell_type": "markdown", - "id": "rental-contribution", - "metadata": {}, - "source": [ - "We see **31 columns**, most of these columns appear numerical. Due to confidentiality reasons, the Kaggle dataset mentions that the columns labeled $V_i$ indicate principle components (PCs) from a PCA analysis of the original data from the credit card company. There is also a \"Time\" and \"Amount\", two original features that remained. The time references time after the first transaction in the dataset, and amount is how much money was considered in the transaction. 
\n", - "\n", - "You can also see a heavy imbalance in the two classes below:" - ] - }, - { - "cell_type": "code", - "execution_count": 3, - "id": "cathedral-mills", - "metadata": {}, - "outputs": [ - { - "data": { - "text/plain": [ - "Text(0.5, 1.0, 'Distribution of Classes')" - ] - }, - "execution_count": 3, - "metadata": {}, - "output_type": "execute_result" - }, - { - "data": { - "image/png": "iVBORw0KGgoAAAANSUhEUgAAAYUAAAEWCAYAAACJ0YulAAAAOXRFWHRTb2Z0d2FyZQBNYXRwbG90bGliIHZlcnNpb24zLjQuMiwgaHR0cHM6Ly9tYXRwbG90bGliLm9yZy8rg+JYAAAACXBIWXMAAAsTAAALEwEAmpwYAAAUQUlEQVR4nO3dfbRddX3n8feHEGDxkOvS2AeQECwPmlp19Irj1FpabU2QlJauRcEHsEVTnKEdHwe0dJZO69SZWVpbpdVUaawuQ5naqaREadXRMDNMJbCkgJROTAMElfBkEhhUAt/54+zsOb3em5wb7r47J3m/1jqL7N/e+3e+94acz/n9fvvsk6pCkiSAQ/ouQJK0/zAUJEktQ0GS1DIUJEktQ0GS1DIUJEktQ0HzLslHkvz2HPW1JMnDSRY0219O8oa56Lvp73NJLpir/mbxvL+b5P4k397H87ckecVc16UDn6GgOdW8GD2aZGeS7yT5X0kuStL+v1ZVF1XV74zY1x5f2Krqrqo6uqoen4Pa353kU1P6X1FVn3iyfc+yjiXA24BlVfUjMxyzKMkHk9zVhOI3mu3F81mrDjyGgrqwsqqOAU4A3gdcAnx8rp8kyaFz3ed+YgnwQFVtm25nksOALwI/DiwHFgEvAR4ATpuvInVgMhTUmaraXlVXA78CXJDkOQBJ1iT53ebPi5P8dTOqeDDJdUkOSfJJBi+O65p3wv8uydIkleTCJHcBXxpqGw6IH0vy1SQ7knw2yVOb5zo9ydbhGnePRpIsB94F/ErzfDc3+9vpqKauy5LcmWRbkj9LMtHs213HBc279/uT/NZMv5skE8359zX9Xdb0/wrgb4FjmzrWTHP6+c3v5peq6utV9URVbauq36mq9dM812lJrm9+x99K8uEmWMjA7zc/z44ktwz9PZ2R5OvNqO+eJG8f6vPMJF8bGg0+d2jfJc3xO5PckeTlM/0etP8xFNS5qvoqsBX4qWl2v63Z93Tghxm8MFdVvQ64i8Go4+iq+s9D5/w08GzglTM85fnArwE/CuwC/nCEGj8P/Efgz5vne940h72+efwM8EzgaODDU455KXAq8HLg3yd59gxP+SFgounnp5uaf7WqvgCsAL7Z1PH6ac59BfD5qnp4bz9X43HgLcBiBiOKlwP/utn388DLgFOaes5hMOKAweju15tR33OALwEk+RfAFcCvA08DPgpcneTwJKcCFwMvas57JbBlxDq1HzAUNF++CTx1mvbHGLx4n1BVj1XVdbX3G3K9u6oeqapHZ9j/yaq6taoeAX4bOGf3QvST9BrgA1W1uXlBfidw7pRRynuq6tGquhm4GfiBcGlqORd4Z1XtrKotwPuB141Yx9OAb41adFXdWFX/u6p2Nc/1UQZBBIPf/zHAs4BU1e1V9a2hfcuSLKqqh6rqpqZ9FfDRqvq7qnq8WXP5HvAvGQTQ4c15C6tqS1V9Y9Ra1T9DQfPlOODBadr/C7AJ+Jskm5NcOkJfd89i/53AQgbvkp+sY5v+hvs+lMEIZ7fhq4X+L4PRxFSLm5qm9nXciHU8wCBIR5LklGaK7ttJdjAYES0GqKovMRjtXA5sS7I6yaLm1F8GzgDuTPKVJC9p2k8A3tZMHX0nyXeA44Fjq2oT8Gbg3U1/VyY5dtRa1T9DQZ1L8iIGL3j/Y+q+5p3y26rqmcAvAG8dmoOeacSwt5HE8UN/XsLgHe/9wCPAkUN1LWAwbTVqv99k8II43Pcu4N69nDfV/U1NU/u6Z8TzvwC8MslRIx7/x8A/ACdX1SIGU3TZvbOq/rCqXggsYzCN9I6m/YaqOgv4IeCvgKuaU+4G3ltVTxl6HFlVa5vzPl1VL21+vgL+04h1aj9gKKgzzWWTZwJXAp+qqlumOebMJCclCbCdwfTDE83uexnMuc/Wa5MsS3Ik8B+Av2guWf1H4Igkr0qyELiMwVTHbvcCSzN0+ewUa4G3JDkxydH8/zWIXbMprqnlKuC9SY5JcgLwVuBTez6z9UkGL8yfSfKsZoH6aUneleSMaY4/BtgBPJzkWcCbdu9I8qIkL25+H48A3wWeSHJYktckmaiqx5rzd/+9/AlwUXNekhzV/E6PSXJqkp9NcnjT16ND52kMGArqwrokOxm8cP0W8AHgV2c49mQG73wfBq4H/qiq/nuz7/eAy5opirfPcP50PgmsYTCVcwTwmzC4GorBAuvHGLwrf4TBIvdu/7X57wNJbuIHXdH0vQH4JwYver8xi7qG/Ubz/JsZjKA+3fS/V1X1PQaLzf/A4EqlHcBXGUwJ/d00p7wdeDWwk8EL+p8P7VvUtD3EYArrAQZTejBY49jSTDldxGBNharaCLyRwbTTQwym/17fnHM4g8uQ72fw+/8hBmsvGhPxS3YkSbs5UpAktQwFSVLLUJAktQwFSVJrrG8otnjx4lq6dGnfZUjSWLnxxhvvr6qnT7dvrENh6dKlbNy4se8yJGmsJLlzpn1OH0mSWoaCJKllKEiSWoaCJKllKEiSWoaCJKllKEiSWmMZCklWJlm9ffv2vkuRpAPKWH54rarWAesmJyffuK99LL30mjmsqF9b3veqvkuQdIAYy5GCJKkbhoIkqWUoSJJahoIkqWUoSJJahoIkqWUoSJJahoIkqWUoSJJahoIkqTWWoeC9jySpG2MZClW1rqpWTUxM9F2KJB1QxjIUJEndMBQkSS1DQZLUMhQkSS1DQZLUMhQkSS1DQZLUMhQkSS1DQZLUMhQkSS1DQZLUMhQkSS1DQZLUMhQkSS1DQZLUMhQkSa39JhSSnJ7kuiQfSXJ63/VI0sGo01BIckWSbUlundK+PMkdSTYlubRpLuBh4Ahga5d1SZKm1/VIYQ2wfLghyQLgcmAFsAw4L8ky4LqqWgFcAryn47okSdPoNBSqagPw4JTm04BNVbW5qr4PXAmcVVVPNPsfAg7vsi5J0vQO7eE5jwPuHtreCrw4ydnAK4GnAB+e6eQkq4BVAEuWLOmuSkk6CPURCtOqqr8E/nKE41YDqwEmJyer67ok6WDSx9VH9wDHD20/o2mTJPWsj1C4ATg5yYlJDgPOBa6eTQdJViZZvX379k4KlKSDVdeXpK4FrgdOTbI1yYVVtQu4GLgWuB24qqpum02/VbWuqlZNTEzMfdGSdBD
rdE2hqs6boX09sL7L55Ykzd5+84lmSVL/xjIUXFOQpG6MZSi4piBJ3RjLUJAkdcNQkCS1xjIUXFOQpG6MZSi4piBJ3RjLUJAkdcNQkCS1DAVJUmssQ8GFZknqxliGggvNktSNsQwFSVI3DAVJUstQkCS1xjIUXGiWpG6MZSi40CxJ3RjLUJAkdcNQkCS1DAVJUstQkCS1DAVJUmssQ8FLUiWpG2MZCl6SKkndGMtQkCR1w1CQJLUMBUlSy1CQJLUMBUlSy1CQJLUMBUlSayxDwQ+vSVI3xjIU/PCaJHVjLENBktSNWYVCkkOSLOqqGElSv/YaCkk+nWRRkqOAW4GvJ3lH96VJkubbKCOFZVW1A/hF4HPAicDruixKktSPUUJhYZKFDELh6qp6rNuSJEl9GSUUPgpsAY4CNiQ5AfBaUEk6AI0SCuuq6riqOqOqCrgL+LWO65Ik9WCUUPjM8EYTDFd2U44kqU+HzrQjybOAHwcmkpw9tGsRcETXhUmS5t+MoQCcCpwJPAVYOdS+E3hjhzVJknoyYyhU1WeBzyZ5SVVdP481SZJ6sqeRwm6bkrwLWDp8fFX1tticZCWw8qSTTuqrBEk6II2y0PxZYAL4AnDN0KM33hBPkroxykjhyKq6pPNKJEm9G2Wk8NdJzui8EklS70YJhX/LIBgeTbIjyc4kO7ouTJI0//Y6fVRVx8xHIZKk/u01FJK8bLr2qtow9+VIkvo0ykLz8HcnHAGcBtwI/GwnFUmSejPK9NHwp5lJcjzwwa4KkiT1Z1++o3kr8Oy5LkSS1L9R1hQ+BFSzeQjwfOCmDmuSJPVklDWFjUN/3gWsrar/2VE9kqQejbKm8IkkhwGnNE13dFuSJKkvo0wfnQ58gsFXcgY4PskFXpIqSQeeUaaP3g/8fFXdAZDkFGAt8MIuC5Mkzb9Rrj5auDsQAKrqH4GF3ZUkSerLSAvNST4GfKrZfi3/fPFZknSAGCUU3gT8G+A3m+0NwB93VpEkqTczTh8leXqSZVX1var6QFWdXVVnA38LLOqimCRHJdmY5Mwu+pck7dme1hQ+BCyepv2pwB+M0nmSK5JsS3LrlPblSe5IsinJpUO7LgGuGqVvSdLc21MonDTdZadVdR3w3BH7XwMsH25IsgC4HFgBLAPOS7Isyc8BXwe2jdi3JGmO7WlNYU/fozDS1UdVtSHJ0inNpwGbqmozQJIrgbOAo4GjGATFo0nWV9UTU/tMsgpYBbBkyZJRypAkjWhPobApyRlVtX64MckKYPOTeM7jgLuHtrcCL66qi5v+Xw/cP10gAFTVamA1wOTkZE13jCRp3+wpFN4MXJPkHAbfnwAwCbwE6GwhuKrWdNW3JGnPZlxTqKr/A/wE8BVgafP4CvDc5gNs++oe4Pih7Wc0bZKknu3xcwpV9T3gT+f4OW8ATk5yIoMwOBd49Ww6SLISWHnSSSfNcWmSdHDbly/ZGVmStcD1wKlJtia5sKp2ARcD1wK3A1dV1W2z6beq1lXVqomJibkvWpIOYqN8onmfVdV5M7SvB9ZPt0+S1J9ORwpdSbIyyert27f3XYokHVD2GgpJbkny91Me1yX5/SRPm48ip3L6SJK6Mcr00eeAx4FPN9vnAkcC32bwieWVnVQmSZp3o4TCK6rqBUPbtyS5qapekOS1XRUmSZp/o6wpLEhy2u6NJC8CFjSbuzqpai9cU5CkbowSCm8APp7kn5JsAT4OvCHJUcDvdVncTFxTkKRu7HX6qKpuAH4iyUSzPfz23NtcS9IBZJSrjyaSfAD4IvDFJO/fHRCSpAPLKNNHVwA7gXOaxw7m/tYXkqT9wChXH/1YVf3y0PZ7knyto3pG4r2PJKkbo4wUHk3y0t0bSX4SeLS7kvbOhWZJ6sYoI4WLgD8bWkd4CLigu5IkSX0Z5eqjm4HnJVnUbO9I8mbg7zuuTZI0z0a+IV5V7aiqHc3mWzuqR5LUo329S2rmtApJ0n5hX0Oh5rSKWfI2F5LUjRlDIcnOJDumeewEjp3HGn+AVx9JUjdmXGiuqmPmsxBJUv/G8pvXJEndMBQkSS1DQZLUMhQkSa2xDAUvSZWkboxlKHhJqiR1YyxDQZLUDUNBktQyFCRJLUNBktQyFCRJLUNBktQyFCRJrbEMBT+8JkndGMtQ8MNrktSNsQwFSVI3DAVJUstQkCS1DAVJUstQkCS1DAVJUstQkCS1DAVJUstQkCS1DAVJUmssQ8F7H0lSN8YyFLz3kSR1YyxDQZLUDUNBktQyFCRJLUNBktQyFCRJLUNBktQyFCRJLUNBktQyFCRJLUNBktQyFCRJLUNBktQyFCRJLUNBktQyFCRJLUNBktQyFCRJrf0mFJI8O8lHkvxFkjf1XY8kHYw6DYUkVyTZluTWKe3Lk9yRZFOSSwGq6vaqugg4B/jJLuuSJE2v65HCGmD5cEOSBcDlwApgGXBekmXNvl8ArgHWd1yXJGkanYZCVW0AHpzSfBqwqao2V9X3gSuBs5rjr66qFcBrZuozyaokG5NsvO+++7oqXZIOSof28JzHAXcPbW8FXpzkdOBs4HD2MFKoqtXAaoDJycnqrEpJOgj1EQrTqqovA1/uuQxJOqj1cfXRPcDxQ9vPaNpGlmRlktXbt2+f08Ik6WDXRyjcAJyc5MQkhwHnAlfPpoOqWldVqyYmJjopUJIOVl1fkroWuB44NcnWJBdW1S7gYuBa4Hbgqqq6rcs6JEmj6XRNoarOm6F9PV52Kkn7nf3mE82z4ZqCJHVjLEPBNQVJ6sZYhoIkqRuGgiSpNZah4JqCJHVjLEPBNQVJ6sZYhoIkqRuGgiSpZShIklpjGQouNEtSN8YyFFxolqRujGUoSJK6YShIklqGgiSpNZah4EKzJHVjv/mO5tmoqnXAusnJyTf2XYuk8bH00mv6LmHObHnfqzrpdyxHCpKkbhgKkqSWoSBJahkKkqSWoSBJao1lKHhJqiR1YyxDwXsfSVI3xjIUJEndMBQkSa1UVd817LMk9wF39l3HXiwG7u+7CEnzZhz+zZ9QVU+fbsdYh8I4SLKxqib7rkPS/Bj3f/NOH0mSWoaCJKllKHRvdd8FSJpXY/1v3jUFSVLLkYIkqWUoSJJahkJHkixPckeSTUku7bseSd1KckWSbUlu7buWJ8NQ6ECSBcDlwApgGXBekmX9ViWpY2uA5X0X8WQZCt04DdhUVZur6vvAlcBZPdckqUNVtQF4sO86nixDoRvHAXcPbW9t2iRpv2YoSJJahkI37gGOH9p+RtMmSfs1Q6EbNwAnJzkxyWHAucDVPdckSXtlKHSgqnYBFwPXArcDV1XVbf1WJalLSdYC1wOnJtma5MK+a9oX3uZCktRypCBJahkKkqSWoSBJahkKkqSWoSBJahkK0oiS/EiSK5N8I8mNSdYnOWXc74opDTu07wKkcZAkwH8DPlFV5zZtzwN+uNfCpDnmSEEazc8Aj1XVR3Y3VNXNDN34MMnSJNclual5/Kum/UeTbEjytSS3JvmpJAuSrGm2b0nylvn/kaQf5EhBGs1zgBv3csw24Oeq6r
tJTgbWApPAq4Frq+q9zXdtHAk8Hziuqp4DkOQpXRUuzYahIM2dhcCHkzwfeBw4pWm/AbgiyULgr6rqa0k2A89M8iHgGuBv+ihYmsrpI2k0twEv3MsxbwHuBZ7HYIRwGLRfvvIyBnfKXZPk/Kp6qDnuy8BFwMe6KVuaHUNBGs2XgMOTrNrdkOS5/PNbpE8A36qqJ4DXAQua404A7q2qP2Hw4v+CJIuBQ6rqM8BlwAvm58eQ9szpI2kEVVVJfgn4YJJLgO8CW4A3Dx32R8BnkpwPfB54pGk/HXhHkseAh4HzGXwT358m2f3G7J1d/wzSKLxLqiSp5fSRJKllKEiSWoaCJKllKEiSWoaCJKllKEiSWoaCJKn1/wAv2shaiu0DZwAAAABJRU5ErkJggg==\n", - "text/plain": [ - "
" - ] - }, - "metadata": { - "needs_background": "light" - }, - "output_type": "display_data" - } - ], - "source": [ - "f = plt.figure()\n", - "ax = f.add_subplot(1,1,1)\n", - "ax.hist(data['Class'], bins = [-0.1, 0.1, 0.9, 1.1], log=True)\n", - "ax.set_ylabel(\"Log Counts\")\n", - "ax.set_xticks([0, 1])\n", - "ax.set_xticklabels([\"0\", \"1\"])\n", - "ax.set_xlabel(\"Class\")\n", - "ax.set_title(\"Distribution of Classes\")" - ] - }, - { - "cell_type": "markdown", - "id": "exact-timeline", - "metadata": {}, - "source": [ - "### 2) Create a JSON-AI default object\n", - "We will now create JSON-AI syntax for our problem based on its specifications. We can do so by setting up a ``ProblemDefinition``. The ``ProblemDefinition`` allows us to specify the target, the column we intend to predict, along with other details. \n", - "\n", - "The end goal of JSON-AI is to provide **a set of instructions on how to compile a machine learning pipeline*.\n", - "\n", - "Our target here is called \"**Class**\", which indicates \"0\" for no fraud and \"1\" for fraud. We'll generate the JSON-AI with the minimal syntax:" - ] - }, - { - "cell_type": "code", - "execution_count": 4, - "id": "medieval-zambia", - "metadata": {}, - "outputs": [ - { - "name": "stderr", - "output_type": "stream", - "text": [ - "INFO:lightwood-51500:Dropping features: []\n", - "INFO:lightwood-51500:Analyzing a sample of 18424\n", - "INFO:lightwood-51500:from a total population of 284807, this is equivalent to 6.5% of your data.\n", - "INFO:lightwood-51500:Using 15 processes to deduct types.\n", - "INFO:lightwood-51500:Infering type for: Time\n", - "INFO:lightwood-51500:Infering type for: V1\n", - "INFO:lightwood-51500:Infering type for: V2\n", - "INFO:lightwood-51500:Infering type for: V3\n", - "INFO:lightwood-51500:Infering type for: V4\n", - "INFO:lightwood-51500:Infering type for: V5\n", - "INFO:lightwood-51500:Infering type for: V6\n", - "INFO:lightwood-51500:Infering type for: V7\n", - "INFO:lightwood-51500:Infering type for: V8\n", - "INFO:lightwood-51500:Infering type for: V9\n", - "INFO:lightwood-51500:Infering type for: V10\n", - "INFO:lightwood-51500:Infering type for: V11\n", - "INFO:lightwood-51500:Infering type for: V12\n", - "INFO:lightwood-51500:Infering type for: V13\n", - "INFO:lightwood-51500:Infering type for: V14\n", - "INFO:lightwood-51500:Column Time has data type integer\n", - "INFO:lightwood-51500:Infering type for: V15\n", - "INFO:lightwood-51500:Column V4 has data type float\n", - "INFO:lightwood-51500:Infering type for: V16\n", - "INFO:lightwood-51500:Column V2 has data type float\n", - "INFO:lightwood-51500:Infering type for: V17\n", - "INFO:lightwood-51500:Column V3 has data type float\n", - "INFO:lightwood-51500:Column V1 has data type float\n", - "INFO:lightwood-51500:Infering type for: V18\n", - "INFO:lightwood-51500:Infering type for: V19\n", - "INFO:lightwood-51500:Column V6 has data type float\n", - "INFO:lightwood-51500:Column V5 has data type float\n", - "INFO:lightwood-51500:Infering type for: V20\n", - "INFO:lightwood-51500:Column V7 has data type float\n", - "INFO:lightwood-51500:Infering type for: V21\n", - "INFO:lightwood-51500:Column V8 has data type float\n", - "INFO:lightwood-51500:Infering type for: V22\n", - "INFO:lightwood-51500:Infering type for: V23\n", - "INFO:lightwood-51500:Column V9 has data type float\n", - "INFO:lightwood-51500:Infering type for: V24\n", - "INFO:lightwood-51500:Column V10 has data type float\n", - "INFO:lightwood-51500:Column V13 has data type float\n", - 
"INFO:lightwood-51500:Column V12 has data type float\n", - "INFO:lightwood-51500:Infering type for: V25\n", - "INFO:lightwood-51500:Column V11 has data type float\n", - "INFO:lightwood-51500:Infering type for: V26\n", - "INFO:lightwood-51500:Column V14 has data type float\n", - "INFO:lightwood-51500:Infering type for: V28\n", - "INFO:lightwood-51500:Infering type for: V27\n", - "INFO:lightwood-51500:Infering type for: Amount\n", - "INFO:lightwood-51500:Column V15 has data type float\n", - "INFO:lightwood-51500:Infering type for: Class\n", - "INFO:lightwood-51500:Column V16 has data type float\n", - "INFO:lightwood-51500:Column V17 has data type float\n", - "INFO:lightwood-51500:Column Class has data type binary\n", - "INFO:lightwood-51500:Column Amount has data type float\n", - "INFO:lightwood-51500:Column V23 has data type float\n", - "INFO:lightwood-51500:Column V18 has data type float\n", - "INFO:lightwood-51500:Column V19 has data type float\n", - "INFO:lightwood-51500:Column V20 has data type float\n", - "INFO:lightwood-51500:Column V28 has data type float\n", - "INFO:lightwood-51500:Column V21 has data type float\n", - "INFO:lightwood-51500:Column V22 has data type float\n", - "INFO:lightwood-51500:Column V26 has data type float\n", - "INFO:lightwood-51500:Column V24 has data type float\n", - "INFO:lightwood-51500:Column V25 has data type float\n", - "INFO:lightwood-51500:Column V27 has data type float\n", - "INFO:lightwood-51500:Starting statistical analysis\n", - "INFO:lightwood-51500:Finished statistical analysis\n" - ] - }, - { - "name": "stdout", - "output_type": "stream", - "text": [ - "MyCustomCleaner.py\n", - "MyCustomCleaner\n", - "MyCustomSplitter.py\n", - "MyCustomSplitter\n" - ] - } - ], - "source": [ - "# Setup the problem definition\n", - "problem_definition = {\n", - " 'target': 'Class',\n", - "}\n", - "\n", - "# Generate the j{ai}son syntax\n", - "default_json = json_ai_from_problem(data, problem_definition)\n" - ] - }, - { - "cell_type": "markdown", - "id": "deadly-rotation", - "metadata": {}, - "source": [ - "Lightwood looks at each of the many columns and indicates they are mostly float, with exception of \"**Class**\" which is binary.\n", - "\n", - "You can observe the JSON-AI if you run the command `print(default_json.to_json())`. Given there are many input features, we won't print it out." - ] - }, - { - "cell_type": "markdown", - "id": "immune-clone", - "metadata": {}, - "source": [ - "These are the only elements required to get off the ground with JSON-AI. However, we're interested in making a *custom* approach. So, let's make this syntax a file, and introduce our own changes." - ] - }, - { - "cell_type": "code", - "execution_count": 5, - "id": "owned-translator", - "metadata": {}, - "outputs": [], - "source": [ - "with open(\"default.json\", \"w\") as fp:\n", - " fp.write(default_json.to_json())" - ] - }, - { - "cell_type": "markdown", - "id": "massive-divide", - "metadata": {}, - "source": [ - "### 3) Build your own splitter module\n", - "\n", - "For Lightwood, the goal of a splitter is to intake an initial dataset (pre-processed ideally, although you can run the pre-processor on each DataFrame within the splitter) and return a dictionary with the keys \"train\", \"test\", and \"dev\" (at minimum). Subsequent steps of the pipeline expect the keys \"train\", \"test\", and \"dev\", so it's important you assign datasets to these as necessary. \n", - "\n", - "We're going to introduce SMOTE sampling in our splitter. 
SMOTE allows you to quickly learn an approximation to make extra \"samples\" that mimic the undersampled class. \n", - "\n", - "We will use the package `imblearn` and `scikit-learn` to quickly create a train/test split and apply SMOTE to our training data only.\n", - "\n", - "**NOTE** This is simply an example of things you can do with the splitter; whether SMOTE sampling is ideal for your problem depends on the question you're trying to answer!" - ] - }, - { - "cell_type": "markdown", - "id": "comparable-diameter", - "metadata": {}, - "source": [ - "```\n", - "from lightwood.api.dtype import dtype\n", - "import pandas as pd\n", - "import numpy as np\n", - "from typing import List, Dict\n", - "from itertools import product\n", - "from lightwood.api.types import TimeseriesSettings\n", - "from lightwood.helpers.log import log\n", - "\n", - "\n", - "from imblearn.over_sampling import SMOTE\n", - "from sklearn.model_selection import train_test_split\n", - "\n", - "\n", - "def MySplitter(\n", - " data: pd.DataFrame,\n", - " target: str,\n", - " pct_train: float = 0.8,\n", - " pct_dev: float = 0.1,\n", - " seed: int = 1,\n", - ") -> Dict[str, pd.DataFrame]:\n", - " \"\"\"\n", - " Custom splitting function\n", - "\n", - "\n", - " :param data: Input data\n", - " :param target: Name of the target\n", - " :param pct_train: Percentage of data reserved for training, taken out of full data\n", - " :param pct_dev: Percentage of data reserved for dev, taken out of train data\n", - " :param seed: Random seed for reproducibility\n", - "\n", - " :returns: A dictionary containing the keys train, test and dev with their respective data frames.\n", - " \"\"\"\n", - "\n", - " # Shuffle the data\n", - " data = data.sample(frac=1, random_state=seed).reset_index(drop=True)\n", - "\n", - " # Split into feature columns + target\n", - " X = data.iloc[:, data.columns != target] # .values\n", - " y = data[target] # .values\n", - "\n", - " # Create a train/test split\n", - " X_train, X_test, y_train, y_test = train_test_split(\n", - " X, y, train_size=pct_train, random_state=seed, stratify=data[target]\n", - " )\n", - "\n", - " X_train, X_dev, y_train, y_dev = train_test_split(\n", - " X, y, test_size=pct_dev, random_state=seed, stratify=y_train\n", - " )\n", - "\n", - " # Create a SMOTE model and bump up underbalanced class JUST for train data\n", - " SMOTE_model = SMOTE(random_state=seed)\n", - "\n", - " Xtrain_mod, ytrain_mod = SMOTE_model.fit_resample(X_train, y_train.ravel())\n", - "\n", - " Xtrain_mod[target] = ytrain_mod\n", - " X_test[target] = y_test\n", - " X_dev[target] = y_dev\n", - "\n", - " return {\"train\": Xtrain_mod, \"test\": X_test, \"dev\": X_dev}\n", - "\n", - "```" - ] - }, - { - "cell_type": "markdown", - "id": "analyzed-radical", - "metadata": {}, - "source": [ - "#### Place your custom module in `~/lightwood_modules`\n", - "\n", - "We automatically search for custom scripts in your `~/lightwood_modules` path. Place your file there. Later, you'll see when we autogenerate code, that you can change your import location if you choose." - ] - }, - { - "cell_type": "markdown", - "id": "lucky-blair", - "metadata": {}, - "source": [ - "### 4) Introduce your custom splitter in JSON-AI\n", - "\n", - "Now let's introduce our custom splitter. JSON-AI keeps a lightweight syntax but fills in many default modules (like splitting, cleaning).\n", - "\n", - "For the custom cleaner, we'll work by editing the \"splitter\" key. 
We will change properties within it as follows:\n", - "(1) \"module\" - place the name of the function. In our case it will be \"MyCustomCleaner.cleaner\"\n", - "(2) \"args\" - any keyword argument specific to your cleaner's internals. \n", - "\n", - "This will look as follows:\n", - "```\n", - " \"splitter\": {\n", - " \"module\": \"MyCustomSplitter.MySplitter\",\n", - " \"args\": {\n", - " \"data\": \"data\",\n", - " \"target\": \"$target\",\n", - " \"pct_train\": 0.8,\n", - " \"pct_dev\": 0.1,\n", - " \"seed\": 1\n", - " }\n", - " },\n", - "```\n", - "\n", - "Let's copy our file `default.json` into `custom.json` and add this block. Then, we can proceed as usual to create python code." - ] - }, - { - "cell_type": "markdown", - "id": "identical-georgia", - "metadata": {}, - "source": [ - "### 5) Generate Python code representing your ML pipeline\n", - "\n", - "Now we're ready to load up our custom JSON-AI and generate the predictor code!\n", - "\n", - "We can do this by first reading in our custom json-syntax, and then calling the function `code_from_json_ai`. " - ] - }, - { - "cell_type": "code", - "execution_count": 6, - "id": "alleged-concentrate", - "metadata": {}, - "outputs": [ - { - "name": "stdout", - "output_type": "stream", - "text": [ - "MyCustomCleaner.py\n", - "MyCustomCleaner\n", - "MyCustomSplitter.py\n", - "MyCustomSplitter\n", - "import lightwood\n", - "from lightwood.analysis import *\n", - "from lightwood.api import *\n", - "from lightwood.data import *\n", - "from lightwood.encoder import *\n", - "from lightwood.ensemble import *\n", - "from lightwood.helpers.device import *\n", - "from lightwood.helpers.general import *\n", - "from lightwood.helpers.log import *\n", - "from lightwood.helpers.numeric import *\n", - "from lightwood.helpers.parallelism import *\n", - "from lightwood.helpers.seed import *\n", - "from lightwood.helpers.text import *\n", - "from lightwood.helpers.torch import *\n", - "from lightwood.mixer import *\n", - "import pandas as pd\n", - "from typing import Dict, List\n", - "import os\n", - "from types import ModuleType\n", - "import importlib.machinery\n", - "import sys\n", - "\n", - "\n", - "for import_dir in [os.path.expanduser(\"~/lightwood_modules\"), \"/etc/lightwood_modules\"]:\n", - " if os.path.exists(import_dir) and os.access(import_dir, os.R_OK):\n", - " for file_name in list(os.walk(import_dir))[0][2]:\n", - " print(file_name)\n", - " if file_name[-3:] != \".py\":\n", - " continue\n", - " mod_name = file_name[:-3]\n", - " print(mod_name)\n", - " loader = importlib.machinery.SourceFileLoader(\n", - " mod_name, os.path.join(import_dir, file_name)\n", - " )\n", - " module = ModuleType(loader.name)\n", - " loader.exec_module(module)\n", - " sys.modules[mod_name] = module\n", - " exec(f\"import {mod_name}\")\n", - "\n", - "\n", - "class Predictor(PredictorInterface):\n", - " target: str\n", - " mixers: List[BaseMixer]\n", - " encoders: Dict[str, BaseEncoder]\n", - " ensemble: BaseEnsemble\n", - " mode: str\n", - "\n", - " def __init__(self):\n", - " seed(420)\n", - " self.target = \"Class\"\n", - " self.mode = \"inactive\"\n", - " self.problem_definition = ProblemDefinition.from_dict(\n", - " {\n", - " \"target\": \"Class\",\n", - " \"pct_invalid\": 2,\n", - " \"unbias_target\": True,\n", - " \"seconds_per_mixer\": 14354,\n", - " \"seconds_per_encoder\": 0,\n", - " \"time_aim\": 64593.50573948541,\n", - " \"target_weights\": None,\n", - " \"positive_domain\": False,\n", - " \"timeseries_settings\": {\n", - " \"is_timeseries\": False,\n", - " 
\"order_by\": None,\n", - " \"window\": None,\n", - " \"group_by\": None,\n", - " \"use_previous_target\": True,\n", - " \"nr_predictions\": None,\n", - " \"historical_columns\": None,\n", - " \"target_type\": \"\",\n", - " \"allow_incomplete_history\": False,\n", - " },\n", - " \"anomaly_detection\": True,\n", - " \"ignore_features\": [],\n", - " \"fit_on_all\": True,\n", - " \"strict_mode\": True,\n", - " \"seed_nr\": 420,\n", - " }\n", - " )\n", - " self.accuracy_functions = [\"balanced_accuracy_score\"]\n", - " self.identifiers = {}\n", - " self.dtype_dict = {\n", - " \"Class\": \"binary\",\n", - " \"Time\": \"integer\",\n", - " \"V1\": \"float\",\n", - " \"V2\": \"float\",\n", - " \"V3\": \"float\",\n", - " \"V4\": \"float\",\n", - " \"V5\": \"float\",\n", - " \"V6\": \"float\",\n", - " \"V7\": \"float\",\n", - " \"V8\": \"float\",\n", - " \"V9\": \"float\",\n", - " \"V10\": \"float\",\n", - " \"V11\": \"float\",\n", - " \"V12\": \"float\",\n", - " \"V13\": \"float\",\n", - " \"V14\": \"float\",\n", - " \"V15\": \"float\",\n", - " \"V16\": \"float\",\n", - " \"V17\": \"float\",\n", - " \"V18\": \"float\",\n", - " \"V19\": \"float\",\n", - " \"V20\": \"float\",\n", - " \"V21\": \"float\",\n", - " \"V22\": \"float\",\n", - " \"V23\": \"float\",\n", - " \"V24\": \"float\",\n", - " \"V25\": \"float\",\n", - " \"V26\": \"float\",\n", - " \"V27\": \"float\",\n", - " \"V28\": \"float\",\n", - " \"Amount\": \"float\",\n", - " }\n", - "\n", - " # Any feature-column dependencies\n", - " self.dependencies = {\n", - " \"Time\": [],\n", - " \"V1\": [],\n", - " \"V2\": [],\n", - " \"V3\": [],\n", - " \"V4\": [],\n", - " \"V5\": [],\n", - " \"V6\": [],\n", - " \"V7\": [],\n", - " \"V8\": [],\n", - " \"V9\": [],\n", - " \"V10\": [],\n", - " \"V11\": [],\n", - " \"V12\": [],\n", - " \"V13\": [],\n", - " \"V14\": [],\n", - " \"V15\": [],\n", - " \"V16\": [],\n", - " \"V17\": [],\n", - " \"V18\": [],\n", - " \"V19\": [],\n", - " \"V20\": [],\n", - " \"V21\": [],\n", - " \"V22\": [],\n", - " \"V23\": [],\n", - " \"V24\": [],\n", - " \"V25\": [],\n", - " \"V26\": [],\n", - " \"V27\": [],\n", - " \"V28\": [],\n", - " \"Amount\": [],\n", - " }\n", - "\n", - " self.input_cols = [\n", - " \"Time\",\n", - " \"V1\",\n", - " \"V2\",\n", - " \"V3\",\n", - " \"V4\",\n", - " \"V5\",\n", - " \"V6\",\n", - " \"V7\",\n", - " \"V8\",\n", - " \"V9\",\n", - " \"V10\",\n", - " \"V11\",\n", - " \"V12\",\n", - " \"V13\",\n", - " \"V14\",\n", - " \"V15\",\n", - " \"V16\",\n", - " \"V17\",\n", - " \"V18\",\n", - " \"V19\",\n", - " \"V20\",\n", - " \"V21\",\n", - " \"V22\",\n", - " \"V23\",\n", - " \"V24\",\n", - " \"V25\",\n", - " \"V26\",\n", - " \"V27\",\n", - " \"V28\",\n", - " \"Amount\",\n", - " ]\n", - "\n", - " # Initial stats analysis\n", - " self.statistical_analysis = None\n", - "\n", - " def analyze_data(self, data: pd.DataFrame) -> None:\n", - " # Perform a statistical analysis on the unprocessed data\n", - "\n", - " log.info(\"Performing statistical analysis on data\")\n", - " self.statistical_analysis = lightwood.data.statistical_analysis(\n", - " data, self.dtype_dict, {}, self.problem_definition\n", - " )\n", - "\n", - " # Instantiate post-training evaluation\n", - " self.analysis_blocks = [\n", - " ICP(\n", - " fixed_significance=None,\n", - " confidence_normalizer=False,\n", - " positive_domain=self.statistical_analysis.positive_domain,\n", - " ),\n", - " AccStats(deps=[\"ICP\"]),\n", - " GlobalFeatureImportance(disable_column_importance=False),\n", - " ]\n", - "\n", - " def preprocess(self, data: 
pd.DataFrame) -> pd.DataFrame:\n", - " # Preprocess and clean data\n", - "\n", - " log.info(\"Cleaning the data\")\n", - " data = cleaner(\n", - " data=data,\n", - " pct_invalid=self.problem_definition.pct_invalid,\n", - " identifiers=self.identifiers,\n", - " dtype_dict=self.dtype_dict,\n", - " target=self.target,\n", - " mode=self.mode,\n", - " timeseries_settings=self.problem_definition.timeseries_settings,\n", - " anomaly_detection=self.problem_definition.anomaly_detection,\n", - " )\n", - "\n", - " # Time-series blocks\n", - "\n", - " return data\n", - "\n", - " def split(self, data: pd.DataFrame) -> Dict[str, pd.DataFrame]:\n", - " # Split the data into training/testing splits\n", - "\n", - " log.info(\"Splitting the data into train/test\")\n", - " train_test_data = MyCustomSplitter.MySplitter(\n", - " data=data, pct_train=0.8, pct_dev=0.1, seed=1, target=self.target\n", - " )\n", - "\n", - " return train_test_data\n", - "\n", - " def prepare(self, data: Dict[str, pd.DataFrame]) -> None:\n", - " # Prepare encoders to featurize data\n", - "\n", - " self.mode = \"train\"\n", - "\n", - " if self.statistical_analysis is None:\n", - " raise Exception(\"Please run analyze_data first\")\n", - "\n", - " # Column to encoder mapping\n", - " self.encoders = {\n", - " \"Class\": Binary.BinaryEncoder(\n", - " is_target=True,\n", - " target_class_distribution=self.statistical_analysis.target_class_distribution,\n", - " ),\n", - " \"Time\": Integer.NumericEncoder(),\n", - " \"V1\": Float.NumericEncoder(),\n", - " \"V2\": Float.NumericEncoder(),\n", - " \"V3\": Float.NumericEncoder(),\n", - " \"V4\": Float.NumericEncoder(),\n", - " \"V5\": Float.NumericEncoder(),\n", - " \"V6\": Float.NumericEncoder(),\n", - " \"V7\": Float.NumericEncoder(),\n", - " \"V8\": Float.NumericEncoder(),\n", - " \"V9\": Float.NumericEncoder(),\n", - " \"V10\": Float.NumericEncoder(),\n", - " \"V11\": Float.NumericEncoder(),\n", - " \"V12\": Float.NumericEncoder(),\n", - " \"V13\": Float.NumericEncoder(),\n", - " \"V14\": Float.NumericEncoder(),\n", - " \"V15\": Float.NumericEncoder(),\n", - " \"V16\": Float.NumericEncoder(),\n", - " \"V17\": Float.NumericEncoder(),\n", - " \"V18\": Float.NumericEncoder(),\n", - " \"V19\": Float.NumericEncoder(),\n", - " \"V20\": Float.NumericEncoder(),\n", - " \"V21\": Float.NumericEncoder(),\n", - " \"V22\": Float.NumericEncoder(),\n", - " \"V23\": Float.NumericEncoder(),\n", - " \"V24\": Float.NumericEncoder(),\n", - " \"V25\": Float.NumericEncoder(),\n", - " \"V26\": Float.NumericEncoder(),\n", - " \"V27\": Float.NumericEncoder(),\n", - " \"V28\": Float.NumericEncoder(),\n", - " \"Amount\": Float.NumericEncoder(),\n", - " }\n", - "\n", - " # Prepare the training + dev data\n", - " concatenated_train_dev = pd.concat([data[\"train\"], data[\"dev\"]])\n", - "\n", - " log.info(\"Preparing the encoders\")\n", - "\n", - " encoder_prepping_dict = {}\n", - "\n", - " # Prepare encoders that do not require learned strategies\n", - " for col_name, encoder in self.encoders.items():\n", - " if not encoder.is_trainable_encoder:\n", - " encoder_prepping_dict[col_name] = [\n", - " encoder,\n", - " concatenated_train_dev[col_name],\n", - " \"prepare\",\n", - " ]\n", - " log.info(\n", - " f\"Encoder prepping dict length of: {len(encoder_prepping_dict)}\"\n", - " )\n", - "\n", - " # Setup parallelization\n", - " parallel_prepped_encoders = mut_method_call(encoder_prepping_dict)\n", - " for col_name, encoder in parallel_prepped_encoders.items():\n", - " self.encoders[col_name] = encoder\n", - "\n", - " # 
Prepare the target\n", - " if self.target not in parallel_prepped_encoders:\n", - " if self.encoders[self.target].is_trainable_encoder:\n", - " self.encoders[self.target].prepare(\n", - " data[\"train\"][self.target], data[\"dev\"][self.target]\n", - " )\n", - " else:\n", - " self.encoders[self.target].prepare(\n", - " pd.concat([data[\"train\"], data[\"dev\"]])[self.target]\n", - " )\n", - "\n", - " # Prepare any non-target encoders that are learned\n", - " for col_name, encoder in self.encoders.items():\n", - " if encoder.is_trainable_encoder:\n", - " priming_data = pd.concat([data[\"train\"], data[\"dev\"]])\n", - " kwargs = {}\n", - " if self.dependencies[col_name]:\n", - " kwargs[\"dependency_data\"] = {}\n", - " for col in self.dependencies[col_name]:\n", - " kwargs[\"dependency_data\"][col] = {\n", - " \"original_type\": self.dtype_dict[col],\n", - " \"data\": priming_data[col],\n", - " }\n", - "\n", - " # If an encoder representation requires the target, provide priming data\n", - " if hasattr(encoder, \"uses_target\"):\n", - " kwargs[\"encoded_target_values\"] = parallel_prepped_encoders[\n", - " self.target\n", - " ].encode(priming_data[self.target])\n", - "\n", - " encoder.prepare(\n", - " data[\"train\"][col_name], data[\"dev\"][col_name], **kwargs\n", - " )\n", - "\n", - " def featurize(self, split_data: Dict[str, pd.DataFrame]):\n", - " # Featurize data into numerical representations for models\n", - "\n", - " log.info(\"Featurizing the data\")\n", - " feature_data = {key: None for key in split_data.keys()}\n", - "\n", - " for key, data in split_data.items():\n", - " feature_data[key] = EncodedDs(self.encoders, data, self.target)\n", - "\n", - " return feature_data\n", - "\n", - " def fit(self, enc_data: Dict[str, pd.DataFrame]) -> None:\n", - " # Fit predictors to estimate target\n", - "\n", - " self.mode = \"train\"\n", - "\n", - " # --------------- #\n", - " # Extract data\n", - " # --------------- #\n", - " # Extract the featurized data into train/dev/test\n", - " encoded_train_data = enc_data[\"train\"]\n", - " encoded_dev_data = enc_data[\"dev\"]\n", - " encoded_test_data = enc_data[\"test\"]\n", - "\n", - " log.info(\"Training the mixers\")\n", - "\n", - " # --------------- #\n", - " # Fit Models\n", - " # --------------- #\n", - " # Assign list of mixers\n", - " self.mixers = [\n", - " Neural(\n", - " fit_on_dev=True,\n", - " search_hyperparameters=True,\n", - " net=\"DefaultNet\",\n", - " stop_after=self.problem_definition.seconds_per_mixer,\n", - " target_encoder=self.encoders[self.target],\n", - " target=self.target,\n", - " dtype_dict=self.dtype_dict,\n", - " input_cols=self.input_cols,\n", - " timeseries_settings=self.problem_definition.timeseries_settings,\n", - " ),\n", - " LightGBM(\n", - " fit_on_dev=True,\n", - " stop_after=self.problem_definition.seconds_per_mixer,\n", - " target=self.target,\n", - " dtype_dict=self.dtype_dict,\n", - " input_cols=self.input_cols,\n", - " ),\n", - " Regression(\n", - " stop_after=self.problem_definition.seconds_per_mixer,\n", - " target=self.target,\n", - " dtype_dict=self.dtype_dict,\n", - " target_encoder=self.encoders[self.target],\n", - " ),\n", - " ]\n", - "\n", - " # Train mixers\n", - " trained_mixers = []\n", - " for mixer in self.mixers:\n", - " try:\n", - " mixer.fit(encoded_train_data, encoded_dev_data)\n", - " trained_mixers.append(mixer)\n", - " except Exception as e:\n", - " log.warning(f\"Exception: {e} when training mixer: {mixer}\")\n", - " if True and mixer.stable:\n", - " raise e\n", - "\n", - " # Update 
mixers to trained versions\n", - " self.mixers = trained_mixers\n", - "\n", - " # --------------- #\n", - " # Create Ensembles\n", - " # --------------- #\n", - " log.info(\"Ensembling the mixer\")\n", - " # Create an ensemble of mixers to identify best performing model\n", - " self.pred_args = PredictionArguments()\n", - " self.ensemble = BestOf(\n", - " ts_analysis=None,\n", - " data=encoded_test_data,\n", - " accuracy_functions=self.accuracy_functions,\n", - " target=self.target,\n", - " mixers=self.mixers,\n", - " )\n", - " self.supports_proba = self.ensemble.supports_proba\n", - "\n", - " def analyze_ensemble(self, enc_data: Dict[str, pd.DataFrame]) -> None:\n", - " # Evaluate quality of fit for the ensemble of mixers\n", - "\n", - " # --------------- #\n", - " # Extract data\n", - " # --------------- #\n", - " # Extract the featurized data into train/dev/test\n", - " encoded_train_data = enc_data[\"train\"]\n", - " encoded_dev_data = enc_data[\"dev\"]\n", - " encoded_test_data = enc_data[\"test\"]\n", - "\n", - " # --------------- #\n", - " # Analyze Ensembles\n", - " # --------------- #\n", - " log.info(\"Analyzing the ensemble of mixers\")\n", - " self.model_analysis, self.runtime_analyzer = model_analyzer(\n", - " data=encoded_test_data,\n", - " train_data=encoded_train_data,\n", - " stats_info=self.statistical_analysis,\n", - " ts_cfg=self.problem_definition.timeseries_settings,\n", - " accuracy_functions=self.accuracy_functions,\n", - " predictor=self.ensemble,\n", - " target=self.target,\n", - " dtype_dict=self.dtype_dict,\n", - " analysis_blocks=self.analysis_blocks,\n", - " )\n", - "\n", - " def learn(self, data: pd.DataFrame) -> None:\n", - " log.info(f\"Dropping features: {self.problem_definition.ignore_features}\")\n", - " data = data.drop(\n", - " columns=self.problem_definition.ignore_features, errors=\"ignore\"\n", - " )\n", - "\n", - " self.mode = \"train\"\n", - "\n", - " # Perform stats analysis\n", - " self.analyze_data(data)\n", - "\n", - " # Pre-process the data\n", - " clean_data = self.preprocess(data)\n", - "\n", - " # Create train/test (dev) split\n", - " train_dev_test = self.split(clean_data)\n", - "\n", - " # Prepare encoders\n", - " self.prepare(train_dev_test)\n", - "\n", - " # Create feature vectors from data\n", - " enc_train_test = self.featurize(train_dev_test)\n", - "\n", - " # Prepare mixers\n", - " self.fit(enc_train_test)\n", - "\n", - " # Analyze the ensemble\n", - " self.analyze_ensemble(enc_train_test)\n", - "\n", - " # ------------------------ #\n", - " # Enable model partial fit AFTER it is trained and evaluated for performance with the appropriate train/dev/test splits.\n", - " # This assumes the predictor could continuously evolve, hence including reserved testing data may improve predictions.\n", - " # SET `json_ai.problem_definition.fit_on_all=False` TO TURN THIS BLOCK OFF.\n", - "\n", - " # Update the mixers with partial fit\n", - " if self.problem_definition.fit_on_all:\n", - "\n", - " log.info(\"Adjustment on validation requested.\")\n", - " update_data = {\n", - " \"new\": enc_train_test[\"test\"],\n", - " \"old\": ConcatedEncodedDs(\n", - " [enc_train_test[\"train\"], enc_train_test[\"dev\"]]\n", - " ),\n", - " } # noqa\n", - "\n", - " self.adjust(update_data)\n", - "\n", - " def adjust(self, new_data: Dict[str, pd.DataFrame]) -> None:\n", - " # Update mixers with new information\n", - "\n", - " self.mode = \"train\"\n", - "\n", - " # --------------- #\n", - " # Extract data\n", - " # --------------- #\n", - " # Extract the 
featurized data\n", - " encoded_old_data = new_data[\"old\"]\n", - " encoded_new_data = new_data[\"new\"]\n", - "\n", - " # --------------- #\n", - " # Adjust (Update) Mixers\n", - " # --------------- #\n", - " log.info(\"Updating the mixers\")\n", - "\n", - " for mixer in self.mixers:\n", - " mixer.partial_fit(encoded_new_data, encoded_old_data)\n", - "\n", - " def predict(self, data: pd.DataFrame, args: Dict = {}) -> pd.DataFrame:\n", - "\n", - " # Remove columns that user specifies to ignore\n", - " log.info(f\"Dropping features: {self.problem_definition.ignore_features}\")\n", - " data = data.drop(\n", - " columns=self.problem_definition.ignore_features, errors=\"ignore\"\n", - " )\n", - " for col in self.input_cols:\n", - " if col not in data.columns:\n", - " data[col] = [None] * len(data)\n", - "\n", - " # Clean the data\n", - " self.mode = \"predict\"\n", - " log.info(\"Cleaning the data\")\n", - " data = cleaner(\n", - " data=data,\n", - " pct_invalid=self.problem_definition.pct_invalid,\n", - " identifiers=self.identifiers,\n", - " dtype_dict=self.dtype_dict,\n", - " target=self.target,\n", - " mode=self.mode,\n", - " timeseries_settings=self.problem_definition.timeseries_settings,\n", - " anomaly_detection=self.problem_definition.anomaly_detection,\n", - " )\n", - "\n", - " # Featurize the data\n", - " encoded_ds = EncodedDs(self.encoders, data, self.target)\n", - " encoded_data = encoded_ds.get_encoded_data(include_target=False)\n", - "\n", - " self.pred_args = PredictionArguments.from_dict(args)\n", - " df = self.ensemble(encoded_ds, args=self.pred_args)\n", - "\n", - " if self.pred_args.all_mixers:\n", - " return df\n", - " else:\n", - " insights, global_insights = explain(\n", - " data=data,\n", - " encoded_data=encoded_data,\n", - " predictions=df,\n", - " ts_analysis=None,\n", - " timeseries_settings=self.problem_definition.timeseries_settings,\n", - " positive_domain=self.statistical_analysis.positive_domain,\n", - " anomaly_detection=self.problem_definition.anomaly_detection,\n", - " analysis=self.runtime_analyzer,\n", - " target_name=self.target,\n", - " target_dtype=self.dtype_dict[self.target],\n", - " explainer_blocks=self.analysis_blocks,\n", - " fixed_confidence=self.pred_args.fixed_confidence,\n", - " anomaly_error_rate=self.pred_args.anomaly_error_rate,\n", - " anomaly_cooldown=self.pred_args.anomaly_cooldown,\n", - " )\n", - " return insights\n", - "\n" - ] - } - ], - "source": [ - "# Make changes to your JSON-file and load the custom version\n", - "with open('custom.json', 'r') as fp:\n", - " modified_json = JsonAI.from_json(fp.read())\n", - "\n", - "#Generate python code that fills in your pipeline\n", - "code = code_from_json_ai(modified_json)\n", - "\n", - "print(code)\n", - "\n", - "# Save code to a file (Optional)\n", - "with open('custom_splitter_pipeline.py', 'w') as fp:\n", - " fp.write(code)" - ] - }, - { - "cell_type": "markdown", - "id": "dental-beauty", - "metadata": {}, - "source": [ - "As you can see, an end-to-end pipeline of our entire ML procedure has been generating. There are several abstracted functions to enable transparency as to what processes your data goes through in order to build these models.\n", - "\n", - "The key steps of the pipeline are as follows:\n", - "\n", - "(1) Run a **statistical analysis** with `analyze_data`
\n", - "(2) Clean your data with `preprocess`
\n", - "(3) Make a training/dev/testing split with `split`
\n", - "(4) Prepare your feature-engineering pipelines with `prepare`
\n", - "(5) Create your features with `featurize`
\n", - "(6) Fit your predictor models with `fit`
\n", - "\n", - "You can customize this further if necessary, but you have all the steps necessary to train a model!\n", - "\n", - "We recommend familiarizing with these steps by calling the above commands, ideally in order. Some commands (namely `prepare`, `featurize`, and `fit`) do depend on other steps.\n", - "\n", - "If you want to omit the individual steps, we recommend your simply call the `learn` method, which compiles all the necessary steps implemented to give your fully trained predictive models starting with unprocessed data! " - ] - }, - { - "cell_type": "markdown", - "id": "amended-oklahoma", - "metadata": {}, - "source": [ - "### 6) Call python to run your code and see your preprocessed outputs\n", - "\n", - "Once we have code, we can turn this into a python object by calling `predictor_from_code`. This instantiates the `PredictorInterface` object. \n", - "\n", - "This predictor object can be then used to run your pipeline." - ] - }, - { - "cell_type": "code", - "execution_count": 7, - "id": "organic-london", - "metadata": {}, - "outputs": [ - { - "name": "stdout", - "output_type": "stream", - "text": [ - "MyCustomCleaner.py\n", - "MyCustomCleaner\n", - "MyCustomSplitter.py\n", - "MyCustomSplitter\n" - ] - } - ], - "source": [ - "# Turn the code above into a predictor object\n", - "predictor = predictor_from_code(code)" - ] - }, - { - "cell_type": "code", - "execution_count": 8, - "id": "fabulous-prime", - "metadata": {}, - "outputs": [ - { - "name": "stderr", - "output_type": "stream", - "text": [ - "INFO:lightwood-51500:Cleaning the data\n", - "INFO:lightwood-51500:Splitting the data into train/test\n", - "/home/natasha/lightwood_modules/MyCustomSplitter.py:56: SettingWithCopyWarning: \n", - "A value is trying to be set on a copy of a slice from a DataFrame.\n", - "Try using .loc[row_indexer,col_indexer] = value instead\n", - "\n", - "See the caveats in the documentation: https://pandas.pydata.org/pandas-docs/stable/user_guide/indexing.html#returning-a-view-versus-a-copy\n", - " X_test[target] = y_test\n", - "/home/natasha/lightwood_modules/MyCustomSplitter.py:57: SettingWithCopyWarning: \n", - "A value is trying to be set on a copy of a slice from a DataFrame.\n", - "Try using .loc[row_indexer,col_indexer] = value instead\n", - "\n", - "See the caveats in the documentation: https://pandas.pydata.org/pandas-docs/stable/user_guide/indexing.html#returning-a-view-versus-a-copy\n", - " X_dev[target] = y_dev\n" - ] - } - ], - "source": [ - "# Pre-process the data\n", - "cleaned_data = predictor.preprocess(data)\n", - "train_test_data = predictor.split(cleaned_data)" - ] - }, - { - "cell_type": "code", - "execution_count": 9, - "id": "suspended-biography", - "metadata": {}, - "outputs": [ - { - "data": { - "image/png": 
"iVBORw0KGgoAAAANSUhEUgAABP4AAAFVCAYAAAB/4yFKAAAAOXRFWHRTb2Z0d2FyZQBNYXRwbG90bGliIHZlcnNpb24zLjQuMiwgaHR0cHM6Ly9tYXRwbG90bGliLm9yZy8rg+JYAAAACXBIWXMAAAsTAAALEwEAmpwYAAA6fElEQVR4nO3df5yldV3//8dTQAVi11XAH4QCrolQflI3f6QlaqlgG6mY/dAiE0Irv6al5cdvrvTVAgMttXAjw8ofpaG2iSGIkFakYEmIqIkrKv5AXQZ18UfL6/vHdY0chjOzZ2bPzLnOdR732+3cZs51vc91vWZ2mCfzuq7zfqeqkCRJkiRJktQvt5t0AZIkSZIkSZLGz8afJEmSJEmS1EM2/iRJkiRJkqQesvEnSZIkSZIk9ZCNP0mSJEmSJKmHbPxJkiRJkiRJPWTjT+qBJIclqSRbJl2LJEmSJEnqBht/0ipom3CjPg6bdL2SJM1LcsyCnNqVZEeSK5O8Icnjk2TSdUqSZsuk/sZKcmKS547reNJa23vSBUg99fQFz38MOBnYCrx/wb7rx3C+zwD7Av87hmNJkgTwZuA8IMABwH2BnwF+CbgwyVOq6oaJVSdJmjVr/TfWvBOBw4BXjfGY0pqx8Setgqr628HnSfamCaV/X7hvoSQHVNXXl3m+Ar617EIlSVrch4fk2fOA04Hn0TQGj51EYZKk2bMnf2NJs8y3+koTlGR7kouTPCDJ+UnmgCvafQck+f+S/EeSryT5dpL/SfJHSfZbcJzbzPE3uC3JTyX5UJJvJflCkle0QTl4jP2SHJnk7mvxtUuSpk9V7aqq5wMfAB6f5BHz+5KsT3Jam1XfTnJ9kjcnOWJgzLFtNj1n2PGT/Hv7un3MJUnSSqTxrCSXJ9mZ5BtJ3pfkUUPG/lKSDya5Ick3k1yT5I1JDmr3bwceCdxrwVuJj2n3m1XqPBt/0uTdE7iI5u26vwO8ut1+CPBM4DLgD2jurvgw8ALg7cs4/nHA64F3A78FfAT47fY4gx4MfAz4w5V8EZKkmfKX7ccnQNP0A/4NeDbwLuA3gdcAjwb+I8m92vHvAb5I83bhW0lyH+ChwJuq6ruYS5Kklfkbmgz6H5q/eV4CrAcuSPLT84OSPB14A807p34feC7wtzRTWxzcDnsucDXwFZq3Gs8/PtbuN6vUeb7VV5q8w4GTqursBduvAQ5t//iZ99okfwC8OMmDq+qDIxz/aODoqtoOkOQs4L9p/ih7+R5XL0maRVe0H3+g/XgqcATw0Kr6yPygJOfQZM5LgROraleSvwV+O8lRVXXVwDHnm4FvWNXKJUm9leSJwC8Cv1ZVWwe2/wlwKfAnSba1UyU9Efg68OiqGpwr/ffnP6mqd7QLe+zr24k1rbzjT5q8rwF/tXBjVX1nvumXZO8kG5IcCFzYDnnIiMd/x3zTrz1uAe8D7pbk+wa2X1xVqaoTV/ZlSJJmyI3tx3XtCr+/CPwL8PkkB84/gG/S/KH12IHXzjf2vnfXX3uMpwFXVtWHwVySJK3I02iaee9YkEd3ArbRLNJxn3bsHLAf8ISVrlZvVmkaeMefNHmfqqpdw3YkeTZwCs1dewsb9RtGPP41Q7Z9tf14F+AbIx5HkqR569qPNwIH0eTJY1l8FcWb5z+pqiuTfBj4xSQvqqqbgR+n+WNs4TQUkiQtx/1oVqL/0hJj7gp8gubdTz8OvAP4apJLaKZH+rvlLrYodZmNP2nydg7b2K6ceAbNfEh/ClwHfIdm7r9zGP2O3aFNxfnTjFylJEm3uH/78ePckiUXAqeN+Pq/Bl5FMwfghTR3/+2imVtJkqSVCs1FqF9YYsyVAFX1ySRHAY9pH48E/gJ4aZIfr6pPrXax0lqw8Sd119OB7cCx7d0QACR5/MQqkiSp8avtx3fR/IF1A7Cuqi5c9BW39ibgFcAvJflX4ATggqr6wrgLlSTNlE/SzD97aVXt9p1NVfVt4Lz2QZLjaLLtecCvzw9bnVKlteEcf1J37aIJme/dlZdkb+B3V+NkLkUvSdqdJHsl+WPgEcB5VfWv7cWpNwIPTnLCIq87ePB5VV1P83aqJ9HMD7iOBYt6mEuSpBX4a5o+x9BVdpPcdeDzA4cM+XD78c4D274BbBg2D6BZpWngHX9Sd72NJrDeneRcmj+KfgH47pKvWrkH0yz68QbgxFU6hyRpejwwydPazw8A7gv8DHAvmmkoBt9G9X+BhwN/n+TvaRb0+E479jjgcm6bLW8AfppmWos5mjmWBplLkqRlqaq3Jfkr4DeSPBD4J+ArwPcDDwM20qxCD/CeJDcA7wc+S7MAyIk0N1/8zcBhLwV+CnhNkn+juUHjoqr6MmaVpoCNP6m7XkFzt9+vAn8CfBH4O5oVgK+aYF2SpNnw8+3jZpq7HT4HXAK8uar+eXBgVc0leTjwfOBngeOB/21f8wHg7CHH/yeale3vDJxdVd9apa9DkjRDquoZSd4HnAz8HnB7mr+lPtw+n/fnNJn1azRZ9FXgP4HfrKr3DYx7JU2z8ASahRdvBzwK+PLqfiXSeKTKt6tLkiRJkiRJfeMcf5IkSZIkSVIP2fiTJEmSJEmSesjGnyRJkiRJktRDNv4kSZIkSZKkHrLxJ0mSJEmSJPWQjT9JkiRJkiSph2z8adUlOTFJJTlmkuecRB2TPO+eSHJgkr9Ocl1b+8VjPv45SWqcx5SkPWFWmVVDjm9WSeoMc8qcGnJ8c0ojsfGnkSU5pv2FNf/YlWRHkiuTvCHJ45NkzOfckuRnxnnM1dB+b7YkudOkaxmTM4CnAmcBTwdetrsXJNk7yTOSXJDk+iTfSfLVJO9L8ptJ9lvtoiXJrFqcWWVWSZo8c2px5pQ5pdWRKhvEGk17deV9wJuB84AABwD3BX4GuCdwIfCUqrph4HV7AfsA36mqm5d5zgLeUFUnLvN1tzlnkhOBvwIeVVUXL+d4I5xvC/AS4PCq2r67WrouyXXAZVX10yOOPwj4R+ChwH8A24AvAHcCfhz4KeDcqvrZdvw5wC9X1Vj/p0aSzKolz7cFs8qskjRR5tSS59uCOWVOaez2nnQBmkofrqq/HdyQ5HnA6cDzaELs2Pl9VbUL2LUWhSU5oKq+vpbn3J0u1bIMdwO+NsrA9ork22gC6jlV9eoFQ85Mch/gKeMtUZKWZFYtQ5dqWQazStI0M6eWoUu1LIM5pU7wrb4ai6raVVXPBz4APD7JI+b3ZfjcEHdsb+P+eJKdSW5I8t9JXtHuPyy3zFfwy4O3ww8co9LMa/CYJB9I8g2aqyK7mwNi7/bcn0ny7SRXJPm5hYPmjz9k+62O3Y55Sbv70wO1blmqljRzPrw2yWfbW7g/2z6/yyLne3SS307yqbbuTyT55SFf31BJ9k/yhwOv/2KaOSfuNTBmS/s9Drf+vp+4xKF/iuYK1N8NCSgAquqTVfXy3dR3ZJI/S/LRJF9vfy4uT/LMIWPvnOSV7dfyrTS3v1+e5HcWjPulJB9sf76+meSaJG9sr6YNjrtPkr9J8oX232J7klck2X/BuEOTvH7gZ+
fLSf5tOf8OkibHrDKrMKskdZg5ZU5hTmkVeMefxu0vgUcAT6AJrMW8FngG8NfAmTQ/i/cBHt3uv55mHoS/Ad4PbF3kOJuAJwN/AbxhxBpPA/YH/qx9/ivAm5PcsarOGfEYg14HrAOeCPwW8JV2+xWLvSDJeuDfgI3A64EPAw8AngU8OsmDq+rrC172cmDf9nzfbseek+R/qupflyowyT7A+cDDaa4knUHz/X4W8Ngkm6rqc8C5wP9w2+/7vy1x+BPaj4v9G43qGJqw+yfg0zT/Rk8B/iLJQVX1hwNj39qOPYvm+7wvcL/2GPP/o/N0mp+J9wO/D9wEHAocBxxM8zNGkgcBFwE30HxvPw/8H+A5wMOTPLKqvptkb+AC4BCan51PAOuB+wM/xug/f5Imz6wyq1bqGMwqSavPnDKnVuoYzCktVFU+fIz0oPkFUMBvLzHmge2YfxjYdmK77ZiBbV8DzhvhnAWcs8S+An5iyL5h55zf9hlg/cD29e22rwH77u7cixx7S7vtsBHHv6zd9uwFY3+93f4HQ17/n8DtB7YfQhNWbx7h+3hSe4zTF2x/Qrv9b0b9vg859uXt+Dsv42fpnObXz6227T9k3O2Ai4E5YJ+Bf68C/mw35zgXuBHYezfjPgJcDRywYPsT2/Oc2D6/f/v8BXvy35EPHz5W92FWLXlss8qs8uHDx4Qf5tSSxzanzCkfq/Dwrb4atxvbj+t2M24OODrJD+7h+T5SVRcu8zV/XlVz80/az88CNtAE8Vp4Is3VkYVXdF7Xbn/ikNf8WVV9Z/5JVX2e5grJfUY8383A4BUequpdwH8BxydZ6e+D+X/rG5cctRtV9c35z9O8beEuwJ2B97TnOLLdfRNNOD8kyWFLHHIO2A94QjJ8ZbQkP0QTPm8C7tC+VeDAJAfSXF39JvDYgeMBPCrJwSv4EiV1h1k1GrNqAbNK0hoxp0ZjTi1gTmkYG38at1F/YT2XJhT+u51T4OwkK/lF+YnlFgh8bMi2q9qPR6zgeCtxOPDxqvrfwY3t808sUsc1Q7Z9FbjLkO3DznddVe0Ysu+jNCuJHTjCcYaZ/7c+YIWvByDJ9yX54yTX0gTRV2gCe37Z+w0AbVA/F/hBmvk/Pprk1Ukes+CQL6e56vgO4Pok/5DkmUkG67xf+/Gl7bkGH1+muTX+ru15P9PW8ljgC+38F6cn+ZE9+bolTYRZNRqzagGzStIaMadGY04tYE5pGBt/Grf7tx8/vtSgqnoncBjNnBMXAY+h+WVycZLbL+N8O5df4h6b1NyYi61iNenl269sPz5gD4/zJpoVzM4DfhF4PPCTwCvb/d/7fVVVZ9H8/JxEM5fHCcCFSd4yMOaTwFE0t96/AbgXzbwlVye5dzts/nt3RnuuYY8XDBzzxTRXA58LfAp4JvDBJKft4dcuaW2ZVavHrGqZVZL2gDm1esypljk1O2z8adx+tf34rt0NrKqvVdXfVtVJNFdjTqeZ0PP4VawPbrkiMeio9uPgFaCv0dwWvdCwK0e1zBquAe7bTm76Pe3zH2D4lag9cQ1wjyR3GrLvKJorTF8Zsm8U/9B+vM1KUaNq6/opmnkxTqmqN1XV+e1bDr4z7DVV9YWqOruqng58P/Bm4KmDV4uq6ttVdV5VPb+qNtEE1j1owhDgk+3HXVV14SKPyxec95qqenVV/Wx7rH8BXuCt6tJUMatGY1YNMKskrSFzajTm1ABzSoux8aexSLJXkj+mWX3qvFpiRaR27J0Gt1XV/ESrcOtg+AbDg2JPPCvNClDz9awHTqFZgeiSgXGfAB6WZL+BsRtoVqxa6Bvtx1FrfQdwELf9xX5Su/3tIx5nVO+g+e/9dwc3JjmW5qrSP1bVzSs89jaaX9Q/n+TZwwYk2Zjk95Y4xvyVt1tdaUtydxZ8j5LsN/hvAlBVu7hlxa87t+OG3Wb/4cExND9zVwKnJLnN/3wk2TvJ/PHWp1nJa/C83+KWtzlsWPSrk9QJZpVZhVllVkkdZk6ZU5hT5tQqmNTttZpuD0zytPbzA4D7Aj9Dc9vve4Bf2M3rD6B5P/8/0vyS+DLNfAnPAnbQ/NKbdynwE0leCFxLk2dvYc98BfiPJH/VPv8V4J7AM6tq8Db31wB/C1yU5G+AO9GEyGeAuy045qXtx9OSvBH4FnBlVV3JcKfTLKv+2iQPpPk+PIDm6t7H2/3jdA7wy8AL00ze+i80y94/G/gS8KKVHriqKskJNP9ur02z5Ps/Al+k+Z49AvhpbrmKNewYX0/yHuBpSW4CPkTz8/RrNMvQD8658QPAJUneThMwO2iuOD6rHfv+dtx7ktzQPv9sW8uJtCtuDdQ+/9aIK5K8nmZ+jv1ovj9PAn6P5vv3KGBrkn+g+Tf6BvAgmhD9j6pa8q0YktacWWVWfY9ZZVZJHWROmVPfY06ZU6uqOrC08EofNJNEnkPzA/NRFizj7WPs3+9juGW596K5ojDXfu/fADx+kdedyMDS68DtaVZC+iDNRKrfBrYDrwfus+C196EJvhvnzzuwb6ll6W91zgXbfoJm4tFr23P/N/ALixznd2hC6ds0VyGeMezY7dgX0Nz+/d12/5bFamm3HwT8GfC59jWfA14LHLi7r2Vg38XA9hH//fZvv+/X0Nzq/WWaX9b3GjJ25KXnB16zD03IXkjzPwLfbf99L6IJw30Hxp4z+G/ZbjsQOBu4jibk/5vmfwoW/vzchWaOiv+iuaJ4E/A/wKuAuw8c7yTgApqw/A7wBZq5Lh41pPZ70axCtr0d+1Xg8vb7dWg75vB2zMdofh6/2X5+KrB+0v99+ujuA7Nqrb/fx2BWmVWLH9+s8uFjwQNzaq2/38dgTplTix/fnPIx9kfab/5USnIWcG1Vvbx9fnBVfXnCZUmS9D1mlSSpy8wpSeq3NZ/jr31f+uuSXJFkV5KLFxl3VJL3JtmZ5LokpybZa2D/ATS3Qr9ifpsBJUkaB7NKktRl5pQkaVSTmOPvaOA4mvfv7zNsQDvZ54XAVTSrEd2bZmno2wEvbocdAVwP/EmSh9K83/z/qartq1m8JGkmmFWSpC4zpyRJI5nEqr7bqurQqnoKzTwGw5wC7As8qaouqKqzaOYPeF6Sde2YvYEfBN5ZVQ8E3kkzJ4IkSXvKrJIkdZk5JUkayZo3/mq05a2PBc6vqhsHtr2FJrge2T7/HDBXVecP7H/Q2AqVJM0ss0qS1GXmlCRpVJN4q+8ojqRZteZ7quraJDvbfduq6kvtnBY/UlUfAn6SZsWaoZKcDJwMsP/++z/oyCOPXL3qJUkju/zyy79SVQdNuo4VGGtWmVOS1E3m1C3MKknqpqWyqquNvw00S0ovtKPdN+8U4Owk+9Msgf6MxQ5YVVuBrQCbNm2qyy67bGzFSpJWLslnJl3DCo01q8wpSeomc+oWZpUkddNSWdXVxt9Iquoq4EdHHZ9kM7B548aNq1eUJEkDlpNV5pQkaa35N5Uk9dskFvcYxQ5g/ZDtG9p9K1JV26rq5PXrhx1akqRlGXtWmVOSpDHybypJUmcbf1fTzDvxPUkOBfZr9
61Iks1Jts7Nze1heZIkjT+rzClJ0hj5N5UkqbONv3cDj0tywMC2pwI3AZes9KBenZIkjdHYs8qckiSNkX9TSZLWfo6/JPsBx7VPDwHWJTmhfX5eVe0EzgKeA5yb5DTgCGALcOaC5eiXe27no5Ak7dakssqckiSNwr+pJEmjSlWt7QmTw4BPL7L78Kra3o47CngN8DCa1ajOBrZU1a49rcEVqCSpO5JcXlWbJl3HoElnlTklSd1hTg1nVklSdyyVVWt+x18bQhlh3FXAo1e9IEmSFjCrJEldZk5JkkbV1Tn+VoUT0UqSusyckiR1nVklSdNlphp/TkQrSeoyc0qS1HVmlSRNl5lq/EmSJEmSJEmzYqYaf96WLknqMnNKktR1ZpUkTZeZavx5W7okqcvMKUlS15lVkjRdZqrxJ0mSJEmSJM0KG3+SJEmSJElSD81U48/5KCRJXWZOSZK6zqySpOkyU40/56OQJHWZOSVJ6jqzSpKmy0w1/iRJkiRJkqRZYeNPkiRJkiRJ6iEbf5IkSZIkSVIPzVTjz4loJUldZk5JkrrOrJKk6TJTjT8nopUkdZk5JUnqOrNKkqbLTDX+JEmSJEmSpFlh40+SJEmSJEnqIRt/kiRJkiRJUg/Z+JMkSZIkSZJ6yMafJEmSJEmS1EN7T7qAaXTY775r0iWsiu1/9IRJlyD1lr83JEmSJElrbabu+EuyOcnWubm5SZciSdJtmFOSpK4zqyRpusxU46+qtlXVyevXr590KZIk3YY5JUnqOrNKkqbLTDX+JEmSJEmSpFlh40+SJEmSJEnqIRt/kiRJkiRJUg/Z+JMkSZIkSZJ6aO9JF7CnkmwHdgLfaTf9QlVdNbmKJEm6hTklSeo6s0qS+mvqG3+t46pq+6SLkCRpEeaUJKnrzCpJ6qGJvNU3ycYkr0tyRZJdSS5eZNxRSd6bZGeS65KcmmSvNS5XkjRjzClJUteZVZKkUUzqjr+jgeOAS4F9hg1IsgG4ELgKOB64N3AGTbPyxQuGvyNJgH8CtlTVd1epbknSbDCnJEldZ1ZJknZrUot7bKuqQ6vqKcBHFxlzCrAv8KSquqCqzgJeCjwvybqBcY+oqh8GHg4cBfz2KtYtSZoN5pQkqevMKknSbk2k8VdVN48w7Fjg/Kq6cWDbW2iC65EDx/pc+/EbwF8CPzrGUiVJM8ickiR1nVklSRrFpO74G8WRwNWDG6rqWprVpo4ESLL//JWqJHsDTwauGHawJCcnuSzJZddff/2qFi5JmgnmlCSp68wqSZpxXW78bQBuGLJ9R7sP4K7AvyS5AvgIsAt42bCDVdXWqtpUVZsOOuigVShXkjRjzClJUteZVZI04ya1uMdYVNU1wA+POj7JZmDzxo0bV60mSZLmmVOSpK4zqySp37p8x98OYP2Q7RvafctWVduq6uT164cdVpKkZTGnJEldZ1ZJ0ozrcuPvatp5J+YlORTYjwXzVIwqyeYkW+fm5sZQniRpxplTkqSuM6skacZ1ufH3buBxSQ4Y2PZU4CbgkpUc0KtTkqQxMqckSV1nVknSjJvIHH9J9gOOa58eAqxLckL7/Lyq2gmcBTwHODfJacARwBbgzAXL0S/nvM5HIUnaLXNKktR1ZpUkaRSTWtzjYOCtC7bNPz8c2F5VO5I8BngNsI1mNapX0gTVilTVNmDbpk2bTlrpMSRJM8GckiR1nVklSdqtiTT+qmo7kBHGXQU8etULkiRpgDklSeo6s0qSNIouz/E3dk5EK0nqMnNKktR1ZpUkTZeZavw5Ea0kqcvMKUlS140tq5J+PiSpY2aq8SdJkiRJkiTNiplq/HlbuiSpy8wpSVLXmVWSNF1mqvHnW6gkSV1mTkmSus6skqTpMlONP0mSJEmSJGlW2PiTJEmSJEmSemimGn/ORyFJ6jJzSpLUdWaVJE2XmWr8OR+FJKnLzClJUteZVZI0XWaq8SdJkiRJkiTNCht/kiRJkiRJUg/Z+JMkSZIkSZJ6aKYaf05EK0nqMnNKktR1ZpUkTZeZavw5Ea0kqcvMKUlS15lVkjRdZqrxJ0mSJEmSJM0KG3+SJEmSJElSD9n4kyRJkiRJknpo70kXIEmSxivJpEtYFVU16RIkSZKkqeIdf5IkSZIkSVIP2fiTJEmSJEmSemimGn9JNifZOjc3N+lSJEm6DXNKktR1ZpUkTZeZavxV1baqOnn9+vWTLkWSpNswpyRJXWdWSdJ0manGnyRJkiRJkjQrbPxJkiRJkiRJPWTjT5IkSZIkSeohG3+SJEmSJElSD/Wi8ZfktUlq0nVIkrQYs0qS1GXmlCT109Q3/pL8GPB9k65DkqTFmFWSpC4zpySpv9a88ZdkY5LXJbkiya4kFy8y7qgk702yM8l1SU5NsteCMXcA/gj47TUoXZI0I8wqSVKXmVOSpFHtPYFzHg0cB1wK7DNsQJINwIXAVcDxwL2BM2galS8eGPr7wF9W1fVJVrNmSdJsMaskSV1mTkmSRjKJxt+2qnonQJK3AQcOGXMKsC/wpKq6EbggyTpgS5LTq+rGJPcHHsKtQ0uSpHEwqyRJXWZOSZJGsuZv9a2qm0cYdixwfhtQ895CE1yPbJ8/HDgK+HSS7QBJtic5aIzlSpJmkFklSeoyc0qSNKquLu5xJHD14IaquhbY2e6jqv68qu5RVYdV1WHttsOq6vphB0xycpLLklx2/fVDh0iStBxjzSpzSpI0Zv5NJUkarfGX5OAkhw88T/tL/1VJNq9CXRuAG4Zs39HuW7aq2lpVm6pq00EHeQFLkvpm2rPKnJKkfpv2nAKzSpKm0ah3/J0D/NbA81OBPwMeD7w9yYnjLWv5qmq3M9Em2Zxk69zc3FqUJElaW+cw5VllTklSr53DlOcUmFWSNG1Gbfw9ELgIIMntaCaKfVFVHQm8DHjumOvaAawfsn1Du29FqmpbVZ28fv2wQ0uSptzUZ5U5JUm9NvU5BWaVJE2bURt/64Gvtp8/CLgz8Mb2+UXAxjHXdTXtvBPzkhwK7MeCeSqWw6tTktRrU59V5pQk9drU51R7DLNKkqbIqI2/z9Gs9gTwBODqqvp8+3w98K0x1/Vu4HFJDhjY9lTgJuCSlR7Uq1OS1GtTn1XmlCT12tTnFJhVkjRt9h5x3OuB05P8BE1I/d7AvocCHxv1hEn2A45rnx4CrEtyQvv8vKraCZwFPAc4N8lpwBHAFuDMBcvRL0s7ae7mjRvHfTFNktQBU59V5pQk9drU51R7brNKkqbISI2/qvrDJJ8HfgT4TZrQmndn4OxlnPNg4K0Lts0/PxzYXlU7kjwGeA2wjWY1qlfSBNWKVdU2YNumTZtO2pPjSJK6pw9ZZU5JUn/1IafArJKkaTNS4y/JPYE3V9VfD9n9m8DdRz1hVW0HdrtaVFVdBTx61ONKkmabWSVJ6jJzSpI0CaPO8fdp4AGL7Lt/u7/znIhWknpt6rPKnJKkXpv6nAKzSpKmzaiNv6WuJt0R+PYYall1TkQrSb029VllTklSr019ToFZJUnTZtG3+ia5P/DDA5uOS3LkgmF3BH4W+MT4
S5MkaWlmlSSpy8wpSdKkLTXH3xOBl7SfF/D7i4z7NPBr4yxqtbgClST1Tq+yypySpN7pVU6BWSVJ02apt/q+HDgAWEdzW/qj2+eDjztU1b2r6sLVLnQcvC1dknqnV1llTklS7/Qqp8CskqRps+gdf1X1XeC77dNR5wKUJGnNmFWSpC4zpyRJk7bUW31vI8kPAN9PMw/FrVTVeeMqSpKklTKrJEldZk5JktbSSI2/JEcBbwGOZvhqVAXsNca6VoXzUUhSf/Uhq8wpSeqvPuQUmFWSNG1Gvd38dcAdgCcB9wUOX/A4YlWqGzPno5CkXpv6rDKnJKnXpj6nwKySpGkz6lt9HwD8XFX902oWI0nSHjCrJEldZk5JktbcqHf8fYohc1BIktQhZpUkqcvMKUnSmhu18fd84EVJpuL2c0nSTDKrJEldZk5JktbcqG/1/UPgEODqJNuBGxYOqKoHj6+s1eFEtJLUa1OfVeaUJPXa1OcUmFWSNG1Gbfxd2T6mWlVtA7Zt2rTppEnXIkkau6nPKnNKknpt6nMKzCpJmjYjNf6q6ldWuxBJkvaEWSVJ6jJzSpI0CaPO8SdJkiRJkiRpiox0x1+Sv9/dmKr62T0vR5KklTGrJEldZk5JkiZh1Dn+DhqybQNwJPBV4ONjq0iSpJUxqyRJXWZOSZLW3Khz/D1q2PYkhwJvB145zqIkSVous0qS1GXmlCRpEvZojr+q+izNsvSnj6ccSZLGy6ySJHWZOSVJWk3jWNxjF/D9YzjOqkuyOcnWubm5SZciSVpbU5FV5pQkzaypyCkwqyRp2oy6uMdRQzbfHrgf8AfAh8ZZ1Gqpqm3Atk2bNp006VokSePVh6wypySpv/qQU2BWSdK0GXVxjyuBGrI9wGXAM8dWkSRJK2NWSZK6zJySJK25URt/wyai/Rbwuar6/BjrkSRppcwqSVKXmVOSpDU36qq+l6x2IZIk7QmzSpLUZeaUJGkSRr3jjyR7A08GHgHcGfga8H7g3Kr639UpT5Kk0ZlVkqQuM6ckSWtt1MU9DgbeA9wf2A58CXgY8OvAR5I8tqquX60il6jrEuBONPNifAJ4RlXduNZ1SJImz6ySJHVZV3Oqrc2skqSeut2I484E7gI8tKqOqKqHVdURwEPa7WeuVoG78dNV9X+q6v7AtcDvTKgOSdLkmVWSpC7rak6BWSVJvTVq4+844IVV9cHBjVX1IeD3gCeMesIkG5O8LskVSXYluXiRcUcleW+SnUmuS3Jqkr0WnH+uHXs7YH+Gr5IlSZoNZpUkqcvGllNgVkmSRjPqHH93AL6+yL6vA7dfxjmPpgm9S4F9hg1IsgG4ELgKOB64N3AGTaPyxQvGngf8CPBR4PnLqEOS1C9mlSSpy8aZU2BWSZJGMOodf5cCL0yy/+DG9vkL2/2j2lZVh1bVU2hCZZhTgH2BJ1XVBVV1FvBS4HlJ1g0OrKrjgLsBHwSevYw6JEn9YlZJkrpsnDkFZpUkaQSjNv6eT3NF6bNJ3pLkT5K8GfgscBTLuCJUVTePMOxY4PwFE8q+hSa0HjnkmLuANwC/NGodkqTeMaskSV02tpwCs0qSNJqRGn9V9V/AfYCtwEHATwIHA2cB96mqj4y5riOBqxfUcC2ws91Hkg1J7jow5MnAlYsdMMnJSS5Lctn1109ksSxJ0iqa9qwypySp3yaQU2BWSdLMG3WOP6rqK8DvrmItgzYANwzZvqPdNz/m75LcgWbZ+Y8Bv7nYAatqK03IsmnTJierlaQemuasMqckqf/WOKfArJKkmbdo4y/JHYGTgA9V1dD5JpI8lGYC2NdV1XdWp8Thquqa9twjS7IZ2Lxx48bVKUqStKb6llXmlCT1S9dzCswqSeq7pd7q+2zg/7Lg1vAFPkaz9Pwp4yyK5grU+iHbN7T7VqSqtlXVyevXDzu0JGkK9SqrzClJ6p1J5hSYVZI085Zq/P0c8OqqumGxAVU1B7wG+MUx13U17ZwT85IcCuzH0qG5pCSbk2ydm5vbw/IkSR3Rq6wypySpdyaZU2BWSdLMW6rxdzTw7yMc49J27Di9G3hckgMGtj0VuAm4ZKUH9eqUJPVOr7LKnJKk3plkToFZJUkzb6nFPVZlstYk+wHHtU8PAdYlOaF9fl5V7aRZ2eo5wLlJTgOOALYAZy5Yin6553Y+Cknql15llTklSb2zagtgmFWSpFEsdcffJ4CHj3CMh7djR3Uw8Nb28VDgqIHnBwNU1Q7gMcBewDbgpcArgZcs4zy34dUpSeqdXmWVOSVJvbNaOQVmlSRpBEvd8fcm4EVJ3lZVHxs2IMn9gOcCLxv1hFW1nWaZ+N2Nuwp49KjHlSTNJLNKktRlq5JTYFZJkkazVOPvT4HjgQ8m+XPgfOBamtvV7wk8DngW8J/Aq1e5zrHwtnRJ6p1eZZU5JUm906ucArNKkqbNom/1rarvAD8JbKUJo/fQrPz0ceBCmqXptwKPq6rvrn6pe87b0iWpX/qWVeaUJPVL33IKzCpJmjZL3fFHVX0LeH6SFwMPopk0FuDzwGXtfkmSJsaskiR1mTklSZqkJRt/86rqJuADq1zLqvO2dEnqrz5klTklSf3Vh5wCs0qSps1Sq/r2jrelS5K6zJySJHWdWSVJ02WmGn+SJEmSJEnSrLDxJ0mSJEmSJPXQTDX+kmxOsnVubm7SpUiSdBvmlCSp68wqSZouM9X4cz4KSVKXmVOSpK4zqyRpuoy0qm+S1y+x+2bgRuC/gHOr6htjqEuSpGUxqyRJXWZOSZImYaTGH/BDwKHAwcCXgOuBg4C7Al8G5oDfAF6W5DFV9YlVqFWSpKWYVZKkLjOnJElrbtS3+v4+cAPwkKq6e1Xdv6ruDjyUJqB+B7gv8HXgFatRqCRJu2FWSZK6zJySJK25URt/pwMvqaoPDW6sqg8CW4DTqurTwB8BPz7WCsfIiWglqdemPqvMKUnqtanPKTCrJGnajNr42wjctMi+ncBh7eefAe6whzWtGieilaRem/qsMqckqdemPqfArJKkaTNq4+8/gZckudvgxiR3B14CXN5uuhdw3fjKkyRpZGaVJKnLzClJ0pobdXGPU4Dzge1JLueWiWgfBHwNeFw77h7AX4y7SEmSRmBWSZK6zJySJK25kRp/VXVFkiOAZwCbgLsBnwDeCPxVVd3Ujvuj1SpUkqSlmFWSpC4zpyRJkzDqHX+0QfTaVaxFkqQ9YlZJkrrMnJIkrbWRG38ASR4CPAK4M83t6O9vV6GSJKkTzCpJUpeZU5KktTRS4y/J/sBbgccD/wt8FbgLsFeSfwaeUlU7V61KSZJ2w6ySJHWZOSVJmoRRV/U9HXgY8FTgjlV1d+COwM+1209bnfLGK8nmJFvn5uYmXYokafymPqvMKUnqtanPKTCrJGnajNr4ezLwwqp6a1XdDFBVN1fVW4HfBZ6yWgWOU1Vtq6qT169fP+lSJEnjN/VZZU5JUq9NfU6BWSVJ02bUxt964LOL7PsssG485UiStGJmlSSpy8wpSdKaG7Xx9xHgWUkyuLF9/qx2vyRJk2RWSZK6zJy
SJK25UVf1fRHwbuDqJG8HvgQcDDwROJxmglpJkibJrJIkdZk5JUlacyM1/qrqoiQPBP5fmrkn7g58AfgP4ElVddXqlShJ0u6ZVZKkLjOnJEmTMOodf1TVR2lWnLqVJHdJ8uNV9S9jrWwESQ4FzgHuAdwMvItmwtxa61okSZPXtawypyRJg7qWU+25zSpJ6rFR5/hbyjHA+8ZwnJX4X5pQuh/wAOAhwJMmVIskqbuOYTJZZU5JkkZxDP5NJUlaBeNo/C1bko1JXpfkiiS7kly8yLijkrw3yc4k1yU5Ncle8/ur6gtVdVn7+XeAK4BD1+SLkCT1ljklSeo6s0qSNIqR3+o7ZkcDxwGXAvsMG5BkA3AhcBVwPHBv4AyaZuWLh4y/C/AzwGNXpWJJ0iwxpyRJXWdWSZJ2a1KNv21V9U6AJG8DDhwy5hRgX5qJbm8ELkiyDtiS5PR2G+0x7gC8DXhVVX1s9cuXJPWcOSVJ6jqzSpK0WxN5q29V3TzCsGOB8wfDCHgLTXA9cn5De5v6G4H/rKozxlqoJGkmmVOSpK4zqyRJo1j0jr8k1wOjrOR0h/GVcytHAhcNbqiqa5PsbPdtaze/Dvg68PylDpbkZOBkgHve855jL1aStPYmnFXmlCRpSf5NJUljlEy6gtWzigupL/VW39cyWkitlg3ADUO272j3keThwK8CVwL/meaH4PVV9acLX1RVW4GtAJs2bXJpeknqh0lmlTklSdod/6aSJE3Uoo2/qtqyhnWsSFX9KzByyzfJZmDzxo0bV68oSdKa6XpWmVOSNNu6nlNgVklS301kjr8R7QDWD9m+od23bFW1rapOXr9+2GElSVoWc0qS1HVmlSTNuC43/q6mmXfie5IcCuzX7lu2JJuTbJ2bmxtDeZKkGWdOSZK6zqySpBnX5cbfu4HHJTlgYNtTgZuAS1ZyQK9OSZLGyJySJHWdWSVJM26pxT1WTZL9gOPap4cA65Kc0D4/r6p2AmcBzwHOTXIacASwBThzwXL0yzmv81FIknbLnJIkdZ1ZJUkaxUQaf8DBwFsXbJt/fjiwvap2JHkM8BqaZeZvAF5JE1QrUlXbgG2bNm06aaXHkCTNBHNKktR1ZpUkabcm0virqu2MsHJUVV0FPHrVC5IkaYA5JUnqOrNKkjSKLs/xN3ZORCtJ6jJzSpLUdWaVJE2XmWr8ORGtJKnLzClJUteZVZI0XWaq8SdJkiRJkiTNiplq/HlbuiSpy8wpSVLXmVWSNF1mqvHnbemSpC4zpyRJXWdWSdJ0manGnyRJkiRJkjQrbPxJkiRJkiRJPTRTjT/no5AkdZk5JUnqOrNKkqbLTDX+nI9CktRl5pSkSUrSy4fGy6ySpOkyU40/SZIkSZIkaVbY+JMkSZIkSZJ6yMafJEmSJEmS1EMz1fhzIlpJUpeZU5KkrjOrJGm6zFTjz4loJUldZk5JkrrOrJKk6TJTjT9JkiRJkiRpVtj4kyRJkiRJknrIxp8kSZIkSZLUQzb+JEmSJEmSpB6y8SdJkiRJkiT1kI0/SZIkSZIkqYdmqvGXZHOSrXNzc5MuRZKk2zCnJEldZ1ZJ0nSZqcZfVW2rqpPXr18/6VIkSboNc0qS1HVmlSRNl5lq/EmSJEmSJEmzwsafJEmSJEmS1EM2/iRJkiRJkqQesvEnSZIkSZIk9dDUN/6S/HmSzyepSdciSdJC5pQkqevMKknqr6lv/AFvBh446SIkSVqEOSVJ6jqzSpJ6aiKNvyQbk7wuyRVJdiW5eJFxRyV5b5KdSa5LcmqSvQbHVNW/VNWX1qRwSdJMMKckSV1nVkmSRrH3hM57NHAccCmwz7ABSTYAFwJXAccD9wbOoGlWvnhtypQkzShzSpLUdWaVJGm3JtX421ZV7wRI8jbgwCFjTgH2BZ5UVTcCFyRZB2xJcnq7TZKk1WBOSZK6zqySJO3WRN7qW1U3jzDsWOD8BWH0FprgeuSqFCZJEuaUJKn7zCpJ0ii6vLjHkcDVgxuq6lpgZ7tvWZKcnOSyJJddf/31YypRkjTDzClJUteZVZI047rc+NsA3DBk+452HwBJzk7yufbzzyU5e9jBqmprVW2qqk0HHXTQatQrSZot5pQkqevMKkmacZOa429squqZo45NshnYvHHjxlWsSJKkW5hTkqSuM6skqb+6fMffDmD9kO0b2n3LVlXbqurk9euHHVaSpGUxpyRJXWdWSdKM63Lj72oWzDuR5FBgPxbMUzGqJJuTbJ2bmxtDeZKkGWdOSZK6zqySpBnX5cbfu4HHJTlgYNtTgZuAS1ZyQK9OSZLGyJySJHWdWSVJM24ic/wl2Q84rn16CLAuyQnt8/OqaidwFvAc4NwkpwFHAFuAMxcsR7+c8zofhSRpt8wpSVLXmVWSpFFManGPg4G3Ltg2//xwYHtV7UjyGOA1wDaa1aheSRNUK1JV24BtmzZtOmmlx5AkzQRzSpLUdWaVJGm3JtL4q6rtQEYYdxXw6FUvSJKkAeaUJKnrzCpJ0ii6PMff2DkRrSSpy8wpSVLXmVWSNF1mqvHnRLSSpC4zpyRJXWdWSdJ0manGnyRJkiRJkjQrZqrx523pkqQuM6ckSV1nVknSdJmpxp+3pUuSusyckiR1nVklSdNlphp/kiRJkiRJ0qyw8SdJkiRJkiT10Ew1/pyPQpLUZeaUJKnrzCpJmi4z1fhzPgpJUpeZU5KkrjOrJGm6zFTjT5IkSZIkSZoVNv4kSZIkSZKkHrLxJ0mSJEmSJPXQTDX+nIhWktRl5pQkqevMKkmaLjPV+HMiWklSl5lTkqSuM6skabrMVONPkiRJkiRJmhU2/iRJkiRJkqQesvEnSZIkSZIk9ZCNP0mSJEmSJKmHbPxJkiRJkiRJPTRTjT+XnpckdZk5JUnqOrNKkqbLTDX+XHpektRl5pQkqevMKkmaLjPV+JMkSZIkSZJmhY0/SZIkSZIkqYds/EmSJEmSJEk9ZONPkiRJkiRJ6iEbf5IkSZIkSVIPTXXjL8kPJvlwkk8m+cckB0y6JkmSBplVkqQuM6ckqd+muvEHnAW8uKruA1wNvGDC9UiStJBZJUnqMnNKknpszRt/STYmeV2SK5LsSnLxIuOOSvLeJDuTXJfk1CR7Dey/K3B4VZ3XbvpL4Mmr/xVIkvrOrJIkdZk5JUka1d4TOOfRwHHApcA+wwYk2QBcCFwFHA/cGziDplH54nbY9wOfG3jZtcChq1OyJGnGmFWSpC4zpyRJI5lE429bVb0TIMnbgAOHjDkF2Bd4UlXdCFyQZB2wJcnp7basWcWSpFljVkmSusyckiSNZM3f6ltVN48w7Fjg/DaM5r2FJrge2T7/HM0Vqnn35NZXqyRJWhGzSpLUZeaUJGlUk7jjbxRHAhcNbqiqa5PsbPdtq6ovJtme5Lh2TopfBc5d7IBJTgZObp9+I8nHV6n2cTsQ+MpanCinrcVZJK2Bafu9ca+xHGXtjTWrzKndS7wxReqRafrdYU61zKoRmFVSn0zT745Fs6qrjb8NwA1Dtu9o9817FvCGJH8CfBz4xcUOWFVbga1jrH
FNJLmsqjZNug5J08PfG2tmrFllTkmaJf7uWBP+TdXy503SSvTld0dXG38jqaorgAdMug5JkhZjVkmSusyckqR+W/M5/ka0A1g/ZPuGdp8kSZNmVkmSusyckiR1tvF3Nc28E9+T5FBgv3bfLJm6W+klTZy/N9aGWdXw503SSvi7Y/WZU7fw503SSvTid0dXG3/vBh6X5ICBbU8FbgIumUxJk9HOoyFJI/P3xpoxq/DnTdLK+LtjTZhTLX/eJK1EX353rPkcf0n2A45rnx4CrEtyQvv8vKraCZwFPAc4N8lpwBHAFuDMBcvRS5I0dmaVJKnLzClJ0qhSVWt7wuQw4NOL7D68qra3444CXgM8jGY1qrOBLVW1a/WrlCTNMrNKktRl5pQkaVRr3vjT7rUB/WpuHdAvNaAlLSbJRuB3aH5vHA28v6qOmWhR6jWzStJymVVaS+aUpOXqa06t+Vt9tbQkG4ALgauA44F7A2fQzMf44gmWJqnbjqZ5y8+lwD4TrkU9Z1ZJWiGzSmvCnJK0Qr3MKe/465gkvwe8ALjX/NwbSV5AMx/H3ZyPQ9IwSW5XVTe3n78NOLAPV6fUTWaVpJUwq7RWzClJK9HXnOrqqr6z7Fjg/AVh9BZgX+CRkylJUtfNB5S0RswqSctmVmkNmVOSlq2vOWXjr3uOBK4e3FBV1wI7232SJE2aWSVJ6jJzSpJaNv66ZwPN5LML7Wj3SZI0aWaVJKnLzClJatn4kyRJkiRJknrIxl/37ADWD9m+od0nSdKkmVWSpC4zpySpZeOve65mwbwTSQ4F9mPBPBWSJE2IWSVJ6jJzSpJaNv66593A45IcMLDtqcBNwCWTKUmSpFsxqyRJXWZOSVJr70kXoNs4C3gOcG6S04AjgC3AmQuWo5ek70myH3Bc+/QQYF2SE9rn51XVzslUpp4yqyQtm1mlNWROSVq2vuZUqmrSNWiBJEcBrwEeRrMa1dnAlqraNcm6JHVXksOATy+y+/Cq2r521WgWmFWSlsus0loypyQtV19zysafJEmSJEmS1EPO8SdJkiRJkiT1kI0/SZIkSZIkqYds/EmSJEmSJEk9ZONPkiRJkiRJ6iEbf5IkSZIkSVIP2fiTJEmSJEmSesjGnzQhSZ6c5KIkNyT5dpJPJDkzyT2SHJakkvzUpOuUJM0us0qS1GXmlLR7Nv6kCUhyBvD3wDXA04HHAq8EHgO8doKlSZIEmFWSpG4zp6TR7D3pAqRZk2Qz8DzgV6vq9QO7LkmylSawJEmaGLNKktRl5pQ0Ou/4k9bebwEfXhBQAFTVrqp697AXJfmlJB9I8rUkO5K8L8mmBWOOTvLP7ZhvJvlYkl8f2P+IJO9PcmP7+K8kTxn7VyhJmnZmlSSpy8wpaUTe8SetoST7AD8KnLGClx8G/DXwKeD2wM8D709ydFVd047ZBnwMeBrwbeC+wLr23OuAfwLeCZwKBPgh4E4r+2okSX1kVkmSusyckpbHxp+0tu4C3AG4drkvrKpT5z9PcjvgAuDBNIF0apIDgcOB46vqv9uh7x04xA8A64HfqKqvt9ves+yvQJLUd2aVJKnLzClpGXyrrzQZtdwXJLlfkrcn+RKwC/guzdWnH2iHfA34LHBWkqcmOXjBIT4FfAN4U5Ljk9xpxdVLkmaBWSVJ6jJzShqBjT9pbX2V5nbxey7nRUkOoLmSdCjNJLY/BvwI8BHgjgBVdTPNJLZfBF4PfLGde+IB7f4dwE8C+9CsfnV9knclOWIMX5ckqT/MKklSl5lT0jLY+JPWUFV9F/hX4HHLfOnDgO8HnlZVb6yqD1TVZTS3mQ8e/+qqejLNHBM/QRNg72pvY6eqLq2qx7f7n0RzZetNK/+KJEl9Y1ZJkrrMnJKWx8aftPZeBWxK8ssLdyS5XZLHD3nNvu3Hbw+M/VGayWlvo6q+W1UXAWcCd2fBZLNVdVNVbaO5inXU8r8ESVLPvQqzSpLUXa/CnJJG4uIe0hqrqm1JzgT+MsnDaVaE+gZwJHAKsJ1mefpBl7Zj/iLJ6TRXqrYAn58fkOT+wB8DfwdcA2wAXgh8pKq+luQJwDOAd9BMhHsI8GvARavxdUqSppdZJUnqMnNKGp2NP2kCqur5Sf4N+A2a28L3pQmnf6QJmjsuGP+lJE9p970T+CRNoL1gYNgXgS8B/xe4B3AD8D6aoAL4H5oJcF8OHAxcT7MU/YvG/fVJkqafWSVJ6jJzShpNqpa9EI4kSZIkSZKkjnOOP0mSJEmSJKmHbPxJkiRJkiRJPWTjT5IkSZIkSeohG3+SJEmSJElSD9n4kyRJkiRJknrIxp8kSZIkSZLUQzb+JEmSJEmSpB6y8SdJkiRJkiT10P8PZdpUE8PhEBUAAAAASUVORK5CYII=\n", - "text/plain": [ - "
" - ] - }, - "metadata": { - "needs_background": "light" - }, - "output_type": "display_data" - } - ], - "source": [ - "plt.rcParams['font.size']=15\n", - "f = plt.figure(figsize=(18, 5))\n", - "\n", - "ax = f.add_subplot(1,3,1)\n", - "ax.hist(train_test_data[\"train\"]['Class'], bins = [-0.1, 0.1, 0.9, 1.1], log=True)\n", - "ax.set_ylabel(\"Log Counts\")\n", - "ax.set_xticks([0, 1])\n", - "ax.set_xticklabels([\"0\", \"1\"])\n", - "ax.set_xlabel(\"Class\")\n", - "ax.set_title(\"Train:\\nDistribution of Classes\")\n", - "ax.set_ylim([1, 1e6])\n", - "\n", - "ax = f.add_subplot(1,3,2)\n", - "ax.hist(train_test_data[\"dev\"]['Class'], bins = [-0.1, 0.1, 0.9, 1.1], log=True, color='k')\n", - "ax.set_ylabel(\"Log Counts\")\n", - "ax.set_xticks([0, 1])\n", - "ax.set_xticklabels([\"0\", \"1\"])\n", - "ax.set_xlabel(\"Class\")\n", - "ax.set_title(\"Dev:\\nDistribution of Classes\")\n", - "ax.set_ylim([1, 1e6])\n", - "\n", - "\n", - "ax = f.add_subplot(1,3,3)\n", - "ax.hist(train_test_data[\"test\"]['Class'], bins = [-0.1, 0.1, 0.9, 1.1], log=True, color='r')\n", - "ax.set_ylabel(\"Log Counts\")\n", - "ax.set_xticks([0, 1])\n", - "ax.set_xticklabels([\"0\", \"1\"])\n", - "ax.set_xlabel(\"Class\")\n", - "ax.set_title(\"Test:\\nDistribution of Classes\")\n", - "ax.set_ylim([1, 1e6])\n", - "\n", - "f.tight_layout()" - ] - }, - { - "cell_type": "markdown", - "id": "operational-binary", - "metadata": {}, - "source": [ - "As you can see, our splitter has greatly increased the representation of the minority class within the training data, but not so for the testing or dev data.\n", - "\n", - "We hope this tutorial was informative on how to introduce a **custom splitter method** to your datasets! For more customization tutorials, please check our [documentation](https://lightwood.io/tutorials.html).\n", - "\n", - "If you want to download the Jupyter-notebook version of this tutorial, check out the source github location found here: `lightwood/docssrc/source/tutorials/custom_splitter`. " - ] - } - ], - "metadata": { - "kernelspec": { - "display_name": "mdb", - "language": "python", - "name": "mdb" - }, - "language_info": { - "codemirror_mode": { - "name": "ipython", - "version": 3 - }, - "file_extension": ".py", - "mimetype": "text/x-python", - "name": "python", - "nbconvert_exporter": "python", - "pygments_lexer": "ipython3", - "version": "3.8.10" - } - }, - "nbformat": 4, - "nbformat_minor": 5 -} diff --git a/docs/tutorials/tutorial_data_analysis/tutorial_data_analysis.html b/docs/tutorials/tutorial_data_analysis/tutorial_data_analysis.html deleted file mode 100644 index 03854fb36..000000000 --- a/docs/tutorials/tutorial_data_analysis/tutorial_data_analysis.html +++ /dev/null @@ -1,1057 +0,0 @@ - - - - - - - - - - Tutorial - Introduction to Lightwood’s statistical analysis — lightwood 1.6.1 documentation - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - -

Tutorial - Introduction to Lightwood’s statistical analysis

-

As you might already know, Lightwood is designed to be a flexible machine learning (ML) library that is able to abstract and automate the entire ML pipeline. Crucially, it is also designed to be extended or modified very easily according to your needs, essentially offering the entire spectrum between fully automated AutoML and a lightweight wrapper for customized ML pipelines.

-

As such, we can identify several different customizable “phases” in the process. The relevant phase for this tutorial is the “statistical analysis” that is normally run in two different places:

-
    -
  • To generate a Json AI object from some dataset and a problem definition

  • -
  • To train a Lightwood predictor

  • -
-

In both cases, we generate a StatisticalAnalysis object to store key facts about the data we are using, and refer to them afterwards.

-
-

Objective

-

In this tutorial, we will take a look at the automatically generated statistical analysis for a sample dataset.

-
-
-

Step 1: load the dataset and define the predictive task

-

The first thing we need is a dataset to analyze. Let’s use Human Development Index information:

-
-
[1]:
-
-
-
-import pandas as pd
-
-df = pd.read_csv('https://raw.githubusercontent.com/mindsdb/lightwood/stable/tests/data/hdi.csv')
-df.head()
-
-
-
-
-
[1]:
-
-
-
-
- - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - -
   Population  Area (sq. mi.)  Pop. Density  GDP ($ per capita)  Literacy (%)  Infant mortality  Development Index
0     9944201         1284000           7.7                1200          47.5             93.82                  2
1     5450661           43094         126.5               31100         100.0              4.56                  4
2    26783383          437072          61.3                1500          40.4             50.25                  2
3        9439             102          92.5                3400          97.0              7.35                  4
4     3431932          176220          19.5               12800          98.0             11.95                  3
-
-
-

This dataset has a handful of factors that are important to each country’s development index, as well as the index itself (very high, high, medium or low). Each row gives information about a country’s status in terms of its population size, density, and GDP per capita, among others.

-

We can see there are columns with integer (e.g. Population), float (Pop. Density) or categorical (e.g. Development Index) data.

-

The task we will consider here is to predict the development index of each nation based on the rest of the available information.

-

Lightwood provides an abstraction called ProblemDefinition to specify the target column of a dataset, along with other important parameters that you might want to define (for a complete list, check the documentation).

-

We will create a simple one:

-
-
[23]:
-
-
-
-from lightwood.api.high_level import ProblemDefinition
-
-problem_definition = ProblemDefinition.from_dict({'target': 'Development Index'})
-
-
-
-

Let’s see how this object has been populated. ProblemDefinition is a Python dataclass, so it comes with some convenient tools to achieve this:

-
-
[24]:
-
-
-
-from dataclasses import fields
-
-{field.name: getattr(problem_definition, field.name) for field in fields(ProblemDefinition)}
-
-
-
-
-
[24]:
-
-
-
-
-{'target': 'Development Index',
- 'pct_invalid': 2,
- 'unbias_target': True,
- 'seconds_per_mixer': None,
- 'seconds_per_encoder': None,
- 'time_aim': None,
- 'target_weights': None,
- 'positive_domain': False,
- 'timeseries_settings': TimeseriesSettings(is_timeseries=False, order_by=None, window=None, group_by=None, use_previous_target=True, nr_predictions=None, historical_columns=None, target_type='', allow_incomplete_history=False),
- 'anomaly_detection': True,
- 'ignore_features': [],
- 'fit_on_all': True,
- 'strict_mode': True,
- 'seed_nr': 420}
-
-
-

Notice how, even though we only defined what the target was, there are a bunch of additional parameters that have been assigned a default value. That is fine for our purposes, but remember that you can set any of these according to your own predictive needs.

-
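If you do want to tweak any of these defaults, the same dictionary accepts the field names shown above as keys. A minimal sketch (the specific values below are illustrative assumptions, not recommendations):

from lightwood.api.high_level import ProblemDefinition

custom_pdef = ProblemDefinition.from_dict({
    'target': 'Development Index',         # same target as before
    'time_aim': 100,                       # rough overall time budget (assumed to be in seconds)
    'ignore_features': ['Area (sq. mi.)']  # columns to exclude from the pipeline
})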

We also need to infer the type of each column. There is a method for this, infer_types, that we can use:

-
-
[25]:
-
-
-
-from lightwood.data import infer_types
-from lightwood.api.types import TypeInformation
-
-type_information = infer_types(df, problem_definition.pct_invalid)
-
-{field.name for field in fields(TypeInformation)}  # show the fields this dataclass has
-
-
-
-
-
-
-
-
-INFO:lightwood-42831:Analyzing a sample of 222
-INFO:lightwood-42831:from a total population of 225, this is equivalent to 98.7% of your data.
-INFO:lightwood-42831:Using 15 processes to deduct types.
-
-
-
-
[25]:
-
-
-
-
-{'additional_info', 'dtypes', 'identifiers'}
-
-
-

We can now check the inferred types:

-
-
[26]:
-
-
-
-type_information.dtypes
-
-
-
-
-
[26]:
-
-
-
-
-{'Population': 'integer',
- 'Area (sq. mi.)': 'integer',
- 'Pop. Density ': 'float',
- 'GDP ($ per capita)': 'integer',
- 'Literacy (%)': 'float',
- 'Infant mortality ': 'float',
- 'Development Index': 'categorical'}
-
-
-

Looks OK!

-
-
-

Step 2: Run the statistical analysis

-

We now have all the necessary ingredients to run the statistical analysis. Normally, you would ask Lightwood for a Json AI object to be generated according to the dataset and the problem definition. Internally, Lightwood will then run the statistical analysis for the provided dataset, and store it for later usage.

-

Afterwards, you would make modifications to the Json AI as needed (for some examples, check out the other tutorials in lightwood/examples/json_ai), and finally generate a predictor object to learn and predict the task.

-
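For reference, that usual path looks roughly like the sketch below. code_from_json_ai and predictor_from_code are used in the same way in the custom-splitter tutorial removed elsewhere in this changeset; json_ai_from_problem is assumed to be the high-level helper that builds the Json AI (and runs this statistical analysis internally):

from lightwood.api.high_level import json_ai_from_problem, code_from_json_ai, predictor_from_code

json_ai = json_ai_from_problem(df, problem_definition)  # the statistical analysis runs in here
# ... modify json_ai as needed ...
code = code_from_json_ai(json_ai)
predictor = predictor_from_code(code)
predictor.learn(df)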

In this case though, we will call it directly:

-
-
[27]:
-
-
-
-from lightwood.api.types import StatisticalAnalysis  # the class where everything is stored
-from lightwood.data import statistical_analysis      # generates an instance of the class
-
-stan = statistical_analysis(df,
-                            type_information.dtypes,
-                            type_information.identifiers,
-                            problem_definition)
-
-
-
-
-
-
-
-
-INFO:lightwood-42831:Starting statistical analysis
-INFO:lightwood-42831:Finished statistical analysis
-
-
-
-
-

Step 3: Peeking inside

-

Now that our analysis is complete, we can check what Lightwood thinks of this dataset:

-
-
[28]:
-
-
-
-{field.name for field in fields(StatisticalAnalysis)}  # show the fields this dataclass has
-
-
-
-
-
[28]:
-
-
-
-
-{'avg_words_per_sentence',
- 'bias',
- 'buckets',
- 'df_std_dev',
- 'distinct',
- 'histograms',
- 'missing',
- 'nr_rows',
- 'positive_domain',
- 'target_class_distribution',
- 'train_observed_classes'}
-
-
-

Some of these fields aren’t really applicable or useful for this dataset, so let’s only check the ones that are.

-

We can start with a very basic question: how many rows does the dataset have?

-
-
[29]:
-
-
-
-stan.nr_rows
-
-
-
-
-
[29]:
-
-
-
-
-225
-
-
-

Here are some other insights produced in the analysis:

-
-

Amount of missing information

-

Is there missing information in the dataset?

-
-
[30]:
-
-
-
-stan.missing
-
-
-
-
-
[30]:
-
-
-
-
-{'Population': 0.0,
- 'Area (sq. mi.)': 0.0,
- 'Pop. Density ': 0.0,
- 'GDP ($ per capita)': 0.0,
- 'Literacy (%)': 0.0,
- 'Infant mortality ': 0.0,
- 'Development Index': 0.0}
-
-
-

Seemingly not!

-
-
-

Buckets per column

-

For numerical columns, values are bucketized into discrete ranges.

-

Each categorical column gets one bucket per observed class.

-

Let’s check an example for one of each:

-
-
[32]:
-
-
-
-stan.buckets['Development Index']  # categorical
-
-
-
-
-
[32]:
-
-
-
-
-['3', '4', '2', '1']
-
-
-
-
[37]:
-
-
-
-stan.buckets['GDP ($ per capita)']  # numerical
-
-
-
-
-
[37]:
-
-
-
-
-[500,
- 1592,
- 2684,
- 3776,
- 4868,
- 5960,
- 7052,
- 8144,
- 9236,
- 10328,
- 11420,
- 12512,
- 13604,
- 14696,
- 15788,
- 16880,
- 17972,
- 19064,
- 20156,
- 21248,
- 22340,
- 23432,
- 24524,
- 25616,
- 26708,
- 27800,
- 28892,
- 29984,
- 31076,
- 32168,
- 33260,
- 34352,
- 35444,
- 36536,
- 37628,
- 38720,
- 39812,
- 40904,
- 41996,
- 43088,
- 44180,
- 45272,
- 46364,
- 47456,
- 48548,
- 49640,
- 50732,
- 51824,
- 52916,
- 54008]
-
-
-
-
-
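Note that the numeric edges above appear to be evenly spaced between the column’s observed extremes: 50 edges separated by a constant step of (54008 - 500) / 49 = 1092. A quick sanity check on the stan object built earlier (a small sketch):

import numpy as np

edges = stan.buckets['GDP ($ per capita)']
steps = np.diff(edges)
print(steps.min(), steps.max())  # both equal 1092 if the spacing is uniform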

Bias per column

-

We can also check whether each column has buckets of data that exhibit some degree of bias:

-
-
[38]:
-
-
-
-for colname, col in stan.bias.items():
-    print(f"'{colname}' entropy: {round(col['entropy'], 3)}")
-    print(f"Biased buckets: {col['biased_buckets']}\n" if col['biased_buckets'] else '\n')
-
-
-
-
-
-
-
-
-'Population' entropy: 0.212
-Biased buckets: [131403695, 78845027, 52565693, 26286360, 7026]
-
-'Area (sq. mi.)' entropy: 0.294
-
-
-'Pop. Density ' entropy: 0.143
-Biased buckets: [650.86, 6183.17, 976.29, 325.43, 0.0]
-
-'GDP ($ per capita)' entropy: 0.76
-
-
-'Literacy (%)' entropy: 0.753
-
-
-'Infant mortality ' entropy: 0.767
-
-
-'Development Index' entropy: 0.89
-
-
-
-
-
-
-

Column histograms

-

Finally, let’s plot histograms for some columns:

-
-
[39]:
-
-
-
-import numpy as np
-import matplotlib.pyplot as plt
-
-# generate color map
-cmap = plt.cm.tab10
-colors = cmap(np.arange(len(df)) % cmap.N)
-
-# column barplot
-columns = []
-for colname, hist in stan.histograms.items():
-    fig, ax = plt.subplots(figsize=(18, 6))
-
-    ax.bar(np.arange(len(hist['x'])), hist['y'], color=colors)
-    ax.set_xticks(np.arange(len(hist['x'])))
-    ax.set_xticklabels(hist['x'], rotation=60)
-    ax.set_title(f"Histogram for column {colname}")
-
-    plt.show()
-
-
-
-
-
-
-
-../../_images/tutorials_tutorial_data_analysis_tutorial_data_analysis_28_0.png -
-
-
-
-
-
-../../_images/tutorials_tutorial_data_analysis_tutorial_data_analysis_28_1.png -
-
-
-
-
-
-../../_images/tutorials_tutorial_data_analysis_tutorial_data_analysis_28_2.png -
-
-
-
-
-
-../../_images/tutorials_tutorial_data_analysis_tutorial_data_analysis_28_3.png -
-
-
-
-
-
-../../_images/tutorials_tutorial_data_analysis_tutorial_data_analysis_28_4.png -
-
-
-
-
-
-../../_images/tutorials_tutorial_data_analysis_tutorial_data_analysis_28_5.png -
-
-
-
-
-
-../../_images/tutorials_tutorial_data_analysis_tutorial_data_analysis_28_6.png -
-
-

This makes it fairly easy to see how imbalanced the target distribution is, and it also provides a quick first pass when searching for outliers, for example.

-
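If the target is all you care about, the same information can be read off without plotting. A short sketch using the fields listed earlier (histograms and target_class_distribution):

hist = stan.histograms['Development Index']
print(list(zip(hist['x'], hist['y'])))   # each class paired with its observed frequency

print(stan.target_class_distribution)    # distribution stored for the target specifically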
-
-
-
-

Final thoughts

-

Lightwood automatically tries to leverage all the information provided by a StatisticalAnalysis instance when generating a predictor for any given dataset and problem definition. It is also a valuable tool for exploring the data yourself.

-

Finally, be aware that you can access these insights when creating custom blocks (e.g. encoders, mixers, or analyzers) if you want to; you just need to pass whatever is necessary as arguments to these blocks inside the Json AI object.

-
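As a pointer, the pipeline code generated earlier in this changeset already passes stats_info=self.statistical_analysis into the analysis step. A custom block could consume the same object; the helper below is a rough, hypothetical sketch of such a check, not part of the Lightwood API:

def my_missingness_check(stats_info):
    # stats_info is expected to be the StatisticalAnalysis instance built above
    worst = max(stats_info.missing.values())
    if worst > 0.1:
        print(f"Warning: at least one column is missing {worst:.0%} of its values")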
-
[ ]:
-
-
-
-
-
-
-
-
- - - - - - - - - - - \ No newline at end of file diff --git a/docs/tutorials/tutorial_data_analysis/tutorial_data_analysis.ipynb b/docs/tutorials/tutorial_data_analysis/tutorial_data_analysis.ipynb deleted file mode 100644 index 08a3e7b02..000000000 --- a/docs/tutorials/tutorial_data_analysis/tutorial_data_analysis.ipynb +++ /dev/null @@ -1,774 +0,0 @@ -{ - "cells": [ - { - "cell_type": "markdown", - "metadata": {}, - "source": [ - "# Tutorial - Introduction to Lightwood's statistical analysis\n", - "\n", - "\n", - "As you might already know, Lightwood is designed to be a flexible machine learning (ML) library that is able to abstract and automate the entire ML pipeline. Crucially, it is also designed to be extended or modified very easily according to your needs, essentially offering the entire spectrum between fully automated AutoML and a lightweight wrapper for customized ML pipelines.\n", - "\n", - "As such, we can identify several different customizable \"phases\" in the process. The relevant phase for this tutorial is the \"statistical analysis\" that is normally ran in two different places:\n", - "\n", - "* To generate a Json AI object from some dataset and a problem definition\n", - "* To train a Lightwood predictor\n", - "\n", - "In both cases, we generate a `StatisticalAnalyzer` object to store key facts about the data we are using, and refer to them afterwards.\n", - "\n", - "## Objective\n", - "\n", - "In this tutorial, we will take a look at the automatically generated statistical analysis for a sample dataset." - ] - }, - { - "cell_type": "markdown", - "metadata": {}, - "source": [ - "## Step 1: load the dataset and define the predictive task\n", - "\n", - "The first thing we need is a dataset to analyze. Let's use Human Development Index information:" - ] - }, - { - "cell_type": "code", - "execution_count": 1, - "metadata": {}, - "outputs": [ - { - "data": { - "text/html": [ - "
\n", - "\n", - "\n", - " \n", - " \n", - " \n", - " \n", - " \n", - " \n", - " \n", - " \n", - " \n", - " \n", - " \n", - " \n", - " \n", - " \n", - " \n", - " \n", - " \n", - " \n", - " \n", - " \n", - " \n", - " \n", - " \n", - " \n", - " \n", - " \n", - " \n", - " \n", - " \n", - " \n", - " \n", - " \n", - " \n", - " \n", - " \n", - " \n", - " \n", - " \n", - " \n", - " \n", - " \n", - " \n", - " \n", - " \n", - " \n", - " \n", - " \n", - " \n", - " \n", - " \n", - " \n", - " \n", - " \n", - " \n", - " \n", - " \n", - " \n", - " \n", - " \n", - " \n", - " \n", - " \n", - " \n", - " \n", - "
PopulationArea (sq. mi.)Pop. DensityGDP ($ per capita)Literacy (%)Infant mortalityDevelopment Index
0994420112840007.7120047.593.822
1545066143094126.531100100.04.564
22678338343707261.3150040.450.252
3943910292.5340097.07.354
4343193217622019.51280098.011.953
\n", - "
" - ], - "text/plain": [ - " Population Area (sq. mi.) Pop. Density GDP ($ per capita) \\\n", - "0 9944201 1284000 7.7 1200 \n", - "1 5450661 43094 126.5 31100 \n", - "2 26783383 437072 61.3 1500 \n", - "3 9439 102 92.5 3400 \n", - "4 3431932 176220 19.5 12800 \n", - "\n", - " Literacy (%) Infant mortality Development Index \n", - "0 47.5 93.82 2 \n", - "1 100.0 4.56 4 \n", - "2 40.4 50.25 2 \n", - "3 97.0 7.35 4 \n", - "4 98.0 11.95 3 " - ] - }, - "execution_count": 1, - "metadata": {}, - "output_type": "execute_result" - } - ], - "source": [ - "import pandas as pd\n", - "\n", - "df = pd.read_csv('https://raw.githubusercontent.com/mindsdb/lightwood/stable/tests/data/hdi.csv')\n", - "df.head()" - ] - }, - { - "cell_type": "markdown", - "metadata": {}, - "source": [ - "This dataset has a handful of important factors to each country's development index, as well as the index itself (very high, high, medium or low). Each row gives information about a country's status in terms of their population size, density, and GDP per capita, among others.\n", - "\n", - "We can see there are columns with integer (e.g. `Population`), float (`Pop. Density`) or categorical (e.g. `Development Index`) data.\n", - "\n", - "The task we will consider here is to predicting the development index of each nation based on the rest of the available information.\n", - "\n", - "Lightwood provides an abstraction called `ProblemDefinition` to specify the target column of a dataset, along with other important parameters that you might want to define (for a complete list, check the documentation).\n", - "\n", - "We will create a simple one:" - ] - }, - { - "cell_type": "code", - "execution_count": 23, - "metadata": {}, - "outputs": [], - "source": [ - "from lightwood.api.high_level import ProblemDefinition\n", - "\n", - "problem_definition = ProblemDefinition.from_dict({'target': 'Development Index'})" - ] - }, - { - "cell_type": "markdown", - "metadata": {}, - "source": [ - "Let's see how this object has been populated. `ProblemDefinition` is a Python `dataclass`, so it comes with some convenient tools to achieve this:" - ] - }, - { - "cell_type": "code", - "execution_count": 24, - "metadata": {}, - "outputs": [ - { - "data": { - "text/plain": [ - "{'target': 'Development Index',\n", - " 'pct_invalid': 2,\n", - " 'unbias_target': True,\n", - " 'seconds_per_mixer': None,\n", - " 'seconds_per_encoder': None,\n", - " 'time_aim': None,\n", - " 'target_weights': None,\n", - " 'positive_domain': False,\n", - " 'timeseries_settings': TimeseriesSettings(is_timeseries=False, order_by=None, window=None, group_by=None, use_previous_target=True, nr_predictions=None, historical_columns=None, target_type='', allow_incomplete_history=False),\n", - " 'anomaly_detection': True,\n", - " 'ignore_features': [],\n", - " 'fit_on_all': True,\n", - " 'strict_mode': True,\n", - " 'seed_nr': 420}" - ] - }, - "execution_count": 24, - "metadata": {}, - "output_type": "execute_result" - } - ], - "source": [ - "from dataclasses import fields\n", - "\n", - "{field.name: getattr(problem_definition, field.name) for field in fields(ProblemDefinition)}" - ] - }, - { - "cell_type": "markdown", - "metadata": {}, - "source": [ - "Notice how, even though we only defined what the `target` was, there are a bunch of additional parameters that have been assigned a default value. That is fine for our purposes, but remember that you can set any of these according to your own predictive needs.\n", - "\n", - "We also need to infer the type of each column. 
There is a method for this, `infer_types`, that we can use:" - ] - }, - { - "cell_type": "code", - "execution_count": 25, - "metadata": {}, - "outputs": [ - { - "name": "stderr", - "output_type": "stream", - "text": [ - "\u001b[32mINFO:lightwood-42831:Analyzing a sample of 222\u001b[0m\n", - "\u001b[32mINFO:lightwood-42831:from a total population of 225, this is equivalent to 98.7% of your data.\u001b[0m\n", - "\u001b[32mINFO:lightwood-42831:Using 15 processes to deduct types.\u001b[0m\n" - ] - }, - { - "data": { - "text/plain": [ - "{'additional_info', 'dtypes', 'identifiers'}" - ] - }, - "execution_count": 25, - "metadata": {}, - "output_type": "execute_result" - } - ], - "source": [ - "from lightwood.data import infer_types\n", - "from lightwood.api.types import TypeInformation\n", - "\n", - "type_information = infer_types(df, problem_definition.pct_invalid)\n", - "\n", - "{field.name for field in fields(TypeInformation)} # show the fields this dataclass has" - ] - }, - { - "cell_type": "markdown", - "metadata": {}, - "source": [ - "We can now check the inferred types:" - ] - }, - { - "cell_type": "code", - "execution_count": 26, - "metadata": {}, - "outputs": [ - { - "data": { - "text/plain": [ - "{'Population': 'integer',\n", - " 'Area (sq. mi.)': 'integer',\n", - " 'Pop. Density ': 'float',\n", - " 'GDP ($ per capita)': 'integer',\n", - " 'Literacy (%)': 'float',\n", - " 'Infant mortality ': 'float',\n", - " 'Development Index': 'categorical'}" - ] - }, - "execution_count": 26, - "metadata": {}, - "output_type": "execute_result" - } - ], - "source": [ - "type_information.dtypes" - ] - }, - { - "cell_type": "markdown", - "metadata": {}, - "source": [ - "Looks OK!" - ] - }, - { - "cell_type": "markdown", - "metadata": {}, - "source": [ - "## Step 2: Run the statistical analysis\n", - "\n", - "We now have all the necessary ingredients to run the statistical analysis. Normally, you would ask Lightwood for a Json AI object to be generated according to the dataset and the problem definition. 
Internally, Lightwood will then run the statistical analysis for the provided dataset, and store it for later usage.\n", - "\n", - "Afterwards, you would make modifications to the Json AI as needed (for some examples, check out the other tutorials in `lightwood/examples/json_ai`), and finally generate a predictor object to learn and predict the task.\n", - "\n", - "In this case though, we will call it directly:" - ] - }, - { - "cell_type": "code", - "execution_count": 27, - "metadata": {}, - "outputs": [ - { - "name": "stderr", - "output_type": "stream", - "text": [ - "\u001b[32mINFO:lightwood-42831:Starting statistical analysis\u001b[0m\n", - "\u001b[32mINFO:lightwood-42831:Finished statistical analysis\u001b[0m\n" - ] - } - ], - "source": [ - "from lightwood.api.types import StatisticalAnalysis # the class where everything is stored\n", - "from lightwood.data import statistical_analysis # generates an instance of the class\n", - "\n", - "stan = statistical_analysis(df, \n", - " type_information.dtypes, \n", - " type_information.identifiers, \n", - " problem_definition)" - ] - }, - { - "cell_type": "markdown", - "metadata": {}, - "source": [ - "## Step 3: Peeking inside\n", - "\n", - "Now that our analysis is complete, we can check what Lightwood thinks of this dataset:" - ] - }, - { - "cell_type": "code", - "execution_count": 28, - "metadata": {}, - "outputs": [ - { - "data": { - "text/plain": [ - "{'avg_words_per_sentence',\n", - " 'bias',\n", - " 'buckets',\n", - " 'df_std_dev',\n", - " 'distinct',\n", - " 'histograms',\n", - " 'missing',\n", - " 'nr_rows',\n", - " 'positive_domain',\n", - " 'target_class_distribution',\n", - " 'train_observed_classes'}" - ] - }, - "execution_count": 28, - "metadata": {}, - "output_type": "execute_result" - } - ], - "source": [ - "{field.name for field in fields(StatisticalAnalysis)} # show the fields this dataclass has" - ] - }, - { - "cell_type": "markdown", - "metadata": {}, - "source": [ - "Some of these fields aren't really applicable nor useful for this dataset, so let's only check the ones that are. \n", - "\n", - "We can start with a very basic question: how many rows does the dataset have?" - ] - }, - { - "cell_type": "code", - "execution_count": 29, - "metadata": {}, - "outputs": [ - { - "data": { - "text/plain": [ - "225" - ] - }, - "execution_count": 29, - "metadata": {}, - "output_type": "execute_result" - } - ], - "source": [ - "stan.nr_rows" - ] - }, - { - "cell_type": "markdown", - "metadata": {}, - "source": [ - "Here are some other insights produced in the analysis:" - ] - }, - { - "cell_type": "markdown", - "metadata": {}, - "source": [ - "### Amount of missing information\n", - "\n", - "Is there missing information in the dataset?" - ] - }, - { - "cell_type": "code", - "execution_count": 30, - "metadata": { - "scrolled": false - }, - "outputs": [ - { - "data": { - "text/plain": [ - "{'Population': 0.0,\n", - " 'Area (sq. mi.)': 0.0,\n", - " 'Pop. Density ': 0.0,\n", - " 'GDP ($ per capita)': 0.0,\n", - " 'Literacy (%)': 0.0,\n", - " 'Infant mortality ': 0.0,\n", - " 'Development Index': 0.0}" - ] - }, - "execution_count": 30, - "metadata": {}, - "output_type": "execute_result" - } - ], - "source": [ - "stan.missing" - ] - }, - { - "cell_type": "markdown", - "metadata": {}, - "source": [ - "Seemingly not!" 
- ] - }, - { - "cell_type": "markdown", - "metadata": {}, - "source": [ - "### Buckets per column\n", - "\n", - "For numerical colums, values are bucketized into discrete ranges.\n", - "\n", - "Each categorical column gets a bucket per each observed class.\n", - "\n", - "Let's check an example for one of each:" - ] - }, - { - "cell_type": "code", - "execution_count": 32, - "metadata": {}, - "outputs": [ - { - "data": { - "text/plain": [ - "['3', '4', '2', '1']" - ] - }, - "execution_count": 32, - "metadata": {}, - "output_type": "execute_result" - } - ], - "source": [ - "stan.buckets['Development Index'] # categorical" - ] - }, - { - "cell_type": "code", - "execution_count": 37, - "metadata": { - "scrolled": false - }, - "outputs": [ - { - "data": { - "text/plain": [ - "[500,\n", - " 1592,\n", - " 2684,\n", - " 3776,\n", - " 4868,\n", - " 5960,\n", - " 7052,\n", - " 8144,\n", - " 9236,\n", - " 10328,\n", - " 11420,\n", - " 12512,\n", - " 13604,\n", - " 14696,\n", - " 15788,\n", - " 16880,\n", - " 17972,\n", - " 19064,\n", - " 20156,\n", - " 21248,\n", - " 22340,\n", - " 23432,\n", - " 24524,\n", - " 25616,\n", - " 26708,\n", - " 27800,\n", - " 28892,\n", - " 29984,\n", - " 31076,\n", - " 32168,\n", - " 33260,\n", - " 34352,\n", - " 35444,\n", - " 36536,\n", - " 37628,\n", - " 38720,\n", - " 39812,\n", - " 40904,\n", - " 41996,\n", - " 43088,\n", - " 44180,\n", - " 45272,\n", - " 46364,\n", - " 47456,\n", - " 48548,\n", - " 49640,\n", - " 50732,\n", - " 51824,\n", - " 52916,\n", - " 54008]" - ] - }, - "execution_count": 37, - "metadata": {}, - "output_type": "execute_result" - } - ], - "source": [ - "stan.buckets['GDP ($ per capita)'] # numerical" - ] - }, - { - "cell_type": "markdown", - "metadata": {}, - "source": [ - "### Bias per column\n", - "\n", - "We can also check whether each column has buckets of data that exhibit some degree of bias:" - ] - }, - { - "cell_type": "code", - "execution_count": 38, - "metadata": {}, - "outputs": [ - { - "name": "stdout", - "output_type": "stream", - "text": [ - "'Population' entropy: 0.212\n", - "Biased buckets: [131403695, 78845027, 52565693, 26286360, 7026]\n", - "\n", - "'Area (sq. mi.)' entropy: 0.294\n", - "\n", - "\n", - "'Pop. 
Density ' entropy: 0.143\n",
-      "Biased buckets: [650.86, 6183.17, 976.29, 325.43, 0.0]\n",
-      "\n",
-      "'GDP ($ per capita)' entropy: 0.76\n",
-      "\n",
-      "\n",
-      "'Literacy (%)' entropy: 0.753\n",
-      "\n",
-      "\n",
-      "'Infant mortality ' entropy: 0.767\n",
-      "\n",
-      "\n",
-      "'Development Index' entropy: 0.89\n",
-      "\n",
-      "\n"
-     ]
-    }
-   ],
-   "source": [
-    "for colname, col in stan.bias.items():\n",
-    "    print(f\"'{colname}' entropy: {round(col['entropy'], 3)}\")\n",
-    "    print(f\"Biased buckets: {col['biased_buckets']}\\n\" if col['biased_buckets'] else '\\n')"
-   ]
-  },
-  {
-   "cell_type": "markdown",
-   "metadata": {},
-   "source": [
-    "### Column histograms\n",
-    "\n",
-    "Finally, let's plot histograms for some columns:"
-   ]
-  },
-  {
-   "cell_type": "code",
-   "execution_count": 39,
-   "metadata": {
-    "scrolled": false
-   },
-   "outputs": [
-    {
-     "data": {
-      "image/png": "<base64-encoded histogram figure omitted>",
-      "text/plain": [
-       "<Figure>"
-      ]
-     },
-     "metadata": {
-      "needs_background": "light"
-     },
-     "output_type": "display_data"
-    },
-    {
-     "data": {
-      "image/png": "<base64-encoded histogram figure omitted>",
ERERERERqU0JBRERERERERGpTQkFEREREREREalNCQURERERERERqU0JBRERERERERGpTQkFEREREREREalNCQURERERERERqU0JBRERERERERGpTQkFEREREREREalNCQURERERERERqU0JBRERERERERGpTQkFEREREREREalNCQURERERERERqU0JBRERERERERGpTQkFEREREREREalNCQURERERERERqU0JBRERERERERGpTQkFEREREREREausooWBmB5jZS2b2YzO7xsyGm9kCZvaMmb1qZteZ2bBuFVZEREREREREBoa2EwpmNg/wZWB8SmlpYDCwDXAS8O2U0sLAn4FdulFQERERERERERk4Or3lYQgwvZkNAWYA3gbWAW6M9y8Htuzwf4iIiIiIiIjIANN2QiGl9BbwTeDXeCLhHeAHwF9SSv+Nj70JzNPs+2a2u5lNMrNJkydPbrcYIiIiIiIiItIPOrnlYRZgC2ABYAwwI7BR7vdTSheklManlMaPHj263WKIiIiIiIiISD/o5JaH9YDXU0qTU0r/AW4GVgdGxS0QAPMCb3VYRhEREREREREZYDpJKPwaWNXMZjAzA9YFXgYeBj4Tn5kA3NZZEUVERERERERkoOlkDIVn8MEXnwNejFgXAF8FDjSzV4HZgIu7UE4RERERERERGUCGtP5I71JKRwNHVya/BqzcSVwRERERERERGdg6fWykiIiIiIiIiHwEKaEgIiIiIiIiIrUpoSAiIiIiIiIitSmhICIiIiIiIiK1KaEgIiIiIiIiIrUpoSAiIiIiIiIitSmhICIiIiIiIiK1KaEgIiIiIiIiIrUpoSAiIiIiIiIitSmhICIiIiIiIiK1KaEgIiIiIiIiIrUpoSAiIiIiIiIitSmhICIiIiIiIiK1KaEgIiIiIiIiIrUpoSAiIiIiIiIitSmhICIiIiIiIiK1KaEgIiIiIiIiIrUpoSAiIiIiIiIitSmhICIiIiIiIiK1KaEgIiIiIiIiIrUpoSAiIiIiIiIitSmhICIiIiIiIiK1KaEgIiIiIiIiIrUpoSAiIiIiIiIitSmhICIiIiIiIiK1KaEgIiIiIiIiIrUpoSAiIiIiIiIitSmhICIiIiIiIiK1KaEgIiIiIiIiIrV1lFAws1FmdqOZ/cTMXjGz1cxsVjO738x+Hr9n6VZhRURERERERGRg6LSHwunAvSmlxYFlgVeAQ4EHU0qLAA/GaxERERERERGZhrSdUDCzkcAngYsBUkr/l1L6C7AFcHl87HJgy86KKCIiIiIiIiIDTSc9FBYAJgOXmtnzZnaRmc0IzJlSejs+81tgzk4LKSIiIiIiIiIDSycJhSHACsC5KaXlgb9Tub0hpZSA1OzLZra7mU0ys0mTJ0/uoBgiIiIiIiIi8kHrJKHwJvBmSumZeH0jnmD4nZnNDRC/f9/syymlC1JK41NK40ePHt1BMURERERERETkg9Z2QiGl9FvgDTNbLCatC7wM3A5MiGkTgNs6KqGIiIiIiIiIDDhDOvz+vsBVZjYMeA3YCU9SXG9muwC/Aj7X4f8QERERERERkQGmo4RCSumHwPgmb63bSVwRERERERERGdg6GUNBRERERERERD6ilFAQERERERERkdqUUBARERERERGR2pRQEBEREREREZHalFAQERERERERkdqUUBARERERERGR2pRQEBEREREREZHalFAQERERERERkdqUUBARERERERGR2pRQEBEREREREZHalFAQERERERERkdqUUBARERERERGR2pRQEBEREREREZHalFAQERERERERkdqUUBARERERERGR2pRQEBEREREREZHalFAQERERERERkdqUUBARERERERGR2pRQEBEREREREZHalFAQERERERERkdqUUBARERERERGR2pRQEBEREREREZHalFAQERERERERkdqUUBARERERERGR2pRQEBEREREREZHalFAQERERERERkdqUUBARERERERGR2pRQEBEREREREZHalFAQERERERERkdqUUBARERERERGR2jpOKJjZYDN73szujNcLmNkzZvaqmV1nZsM6L6aIiIiIiIiIDCTd6KGwH/BK6fVJwLdTSgsDfwZ26cL/EBEREREREZEBpKOEgpnNC2wKXBSvDVgHuDE+cjmwZSf/Q0REREREREQGnk57KJwGHAK8F69nA/6SUvpvvH4TmKfZF81sdzObZGaTJk+e3GExREREREREROSD1HZCwcw2A36fUvpBO99PKV2QUhqfUho/evTodoshIiIiIiIiIv1gSAffXR34lJltAgwHRgCnA6PMbEj0UpgXeKvzYoqIiIiIiIjIQNJ2D4WU0mEppXlTSuOAbYCHUkrbAQ8Dn4mPTQBu67iUIiIiIiIiIjKgdOMpD1VfBQ40s1fxMRUungr/Q0RERERERET6USe3PLwvpfQI8Ej8/RqwcjfiioiIiIiIiMjANDV6KIiIiIiIiIjINE4JBRERERERERGpTQkFEREREREREalNCQURERERERERqU0JBRERERERERGpTQkFEREREREREalNCQURERERERERqU0JBRERERERERGpTQkFEREREREREalNCQURERERERERqU0JBRERERERERGpTQkFEREREREREalNCQURERERERERqU0JBRERERERERGpTQkFEREREREREalNCQURERERERERqU0JBRERERERERGpTQkFEREREREREalNCQURERERERERqU0JBRERERERERGpTQkFEREREREREalNCQURERERERERqU0JBRERERERERGpTQkFEREREREREalNCQURERERERERqW1IfxdAgIkjO/juO90rh4iIiIiIiEgm9VAQERERERERkdqUUBARERERERGR2pRQEBEREREREZHa2k4omNlYM3vYzF42s5fMbL+YPquZ3W9mP4/fs3SvuCIiIiIiIiIyEHTSQ+G/wEEppSWBVYG9zWxJ4FDgwZTSIsCD8VpEREREREREpiFtJxRSSm+nlJ6Lv98FXgHmAbYALo+PXQ5s2WEZRURERERERGSA6coYCmY2DlgeeAaYM6X0drz1W2DOXr6zu5lNMrNJkydP7kYxREREREREROQD0nFCwcxmAm4C9k8p/bX8XkopAanZ91JKF6SUxqeUxo8ePbrTYoiIiIiIiIjIB6ijhIKZDcWTCVellG6Oyb8zs7nj/bmB33dWRBEREREREREZaDp5yoMBFwOvpJROLb11OzAh/p4A3NZ+8URERERERERkIBrSwXdXB74IvGhmP4xpXwNOBK43s12AXwGf66iEIiIiIiIiIjLgtJ1QSCk9Dlgvb6/bblwRERERERERGfi68pQHEREREREREfloUUJBRERERERERGpTQkFEREREREREalNCQURERERERERqU0JBRERERERERGpTQkFEREREREREalNCQURERERERERqU0JBRERERERERGpTQkFEREREREREalNCQURERERERERqU0JBRERERERERGpTQkFEREREREREalNCQU
RERERERERqG9LfBZDuWubyZdr63osTXuzx+pXFl2i7DEv85JW2vysiIiIiIiIfDuqhICIiIiIiIiK1KaEgIiIiIiIiIrUpoSAiIiIiIiIitSmhICIiIiIiIiK1KaEgIiIiIiIiIrUpoSAiIiIiIiIitSmhICIiIiIiIiK1KaEgIiIiIiIiIrUpoSAiIiIiIiIitSmhICIiIiIiIiK1KaEgIiIiIiIiIrUN6e8CiOT61uc3a+t7B113Z5dLIiIiIiIiIuqhICIiIiIiIiK1qYeCfOS8eehjbX933hPX6GJJREREREREPrzUQ0FEREREREREaptqPRTMbCPgdGAwcFFK6cSp9b9k4Dp7j4fa/u7e563TxZJ038SJE/vluyIi0v8efGihtr+77jq/6GJJRERE+s9U6aFgZoOBs4GNgSWBbc1syanxv0RERERERETkgze1eiisDLyaUnoNwMyuBbYAXp5K/0/kQ63dlq5qK9dcD/+w7TL8du3l2v6uiLRvmcuXafu7L054sYslaZiWe5cNRO0eu3XcFhHpH6pzN0ytMRTmAd4ovX4zpomIiIiIiIjINMBSSt0PavYZYKOU0q7x+ovAKimlfUqf2R3YPV4uBvy06wXpX7MDfxhgsQZanG7GGmhxuhlroMXpZqyBFqebsQZanG7GGmhxuhlroMXpZqyBFqebsQZanG7GGmhxuhlroMXpZqyBFqebsQZanG7GGmhxuhlroMXpZqyBFqfbsQaK+VNKo6sTp9YtD28BY0uv541p70spXQBcMJX+f78zs0kppfEDKdZAizMQy6R5+3CWSfP24SyT5u3DWSbN24ezTJq3D2eZNG8fzjJp3j6cZRqI8/ZhMLVueXgWWMTMFjCzYcA2wO1T6X+JiIiIiIiIyAdsqvRQSCn918z2Ae7DHxt5SUrppanxv0RERERERETkgze1bnkgpXQ3cPfUiv8h0M3bOboVa6DF6WasgRanm7EGWpxuxhpocboZa6DF6WasgRanm7EGWpxuxhpocboZa6DF6WasgRanm7EGWpxuxhpocboZa6DF6WasgRanm7EGWpxuxhpocboda0CbKoMyioiIiIiIiMi0bWqNoSAiIiIiIiIi0zAlFERERERERESkNiUUBDOz/i6DyNSi7VtEBhIdkz7czEx15w+I9pUPlrbtD9a0tH1rw+mSeETm2mY2zsym2mCXZrZx5XXHG2Pq4kAa09LOUZia67NTnS5vMxtmZuua2epdjNnRccXMRprZ0E7LYGbzmNl8wKBO43VLlGmwTtofjG4v72nx+NYtZraEmY3sYrwBu6wHQtm6vbwHmuoy7tYy71Kd6b1ulGUgmVrLu1PdqJ92a96K7w2kOmG311On2/bU2I4GyrZYNdCuv/qbKrVdYGZzA5cDRwNnAZ+bSv/nq8C25cpxuxujmW1nZoeb2R1mtmtpunWyk6SUUjcq72Y2rNMYEafTi+MRwAVmNtrMBncYa4iZfSwqgvOa2UxtxpnezD5rZrMW67+DZf5tYD/gNjM7EtrbpuICfm4zm684IbWz7KOCfB4wd/WkXTPemfi8PQqcDmxtZgu1U65Ybyub2UptlqX4zszADcAanZy0y+u6C9v3TGb21bjoLipL7czbUDNbw8zGm9mqZjaqgzINM7NN4meuDsrUreW9oZmtZmZW2t/arZCOMLNzYx9ue93F+lrCzBaOpMn0bcbp5rK+EtionXI00+m5ZGpu2x2cd2cwswOBQ8xs+3J56pRtaizvbolz5UqtP9nSkDivLAyN7aGN4/f0ZjbWImneSQXezBY1s5fNbNV2Y3RTF5c1dGl5V3VwrJzbzLapHtvajDckzuOdztv08f3/xvfbOj7FcXe4mS3Yzver4SLm6I6CeKPot8xsxtK0/lzWRf2744vuWNZzmtlyHcaZzsxmM7MVOi1XHJMONLMZStOs0/2tPw2YLNuH3AnAvSmlE8xsE+DbZvaDlNJPu/UPzGwWYE9gtZTSe2a2PLAO8BrwWErpDzVijY4yHwi8AhxmZtsDx6WUHmizfPsC/wIuTSn9N6YNTin9r2ac3YCxwO/N7PKU0rul9yx3BzazzYAlgD+Y2VMppZ/U+X7JCcB7KaXJUbEcAswL/LLuvOHJpjmARYEHgLfN7HHgyZrlOgrYAbjJzJ4Aro9totb8mdkSwCdSSsvGSfsiMzsH+BvwQErpuzXKdBK+XLYws+8AexfbQU3HA++klH4dJ4GZ8fX4/dx4cSLbKKW0UJyQtgfWA5Yys3NTSr+pWabTgDmB9c3svJTSoW2eSI4BRgPnmtnXUkq3mNmgNi52L4iT0EEppbehsW+0sY0fCwwqtuU29xHwZTRL/P034D0zuwu4q435OxVf3gsDj5jZgW3OW8fL28xWBG4BzgeWNbNHUko/6aAi8XW8HvLPqFQmYAzwRs3ldDowCt/nfgG8amYPpZSeqVmebi3rY/EGimPNbHbgnA4uuvcF3gPOL51L2tlPBuK2/Q38HPBTYHXgVjObE/hVzeNl15Y3gJmtBbyYUvpjuzFKzgfuAp5t8n/qrIPTgP8Cc5knmg9OKb3YRpwLgH8AK5nZOSmli0rlqbtdHY5vA5ua2Ysppb+3s12Z2WfxutvLKaV/1vluRbeWNXRpeUddYm5g/pTSwx1slxcD3y2WT7GuigvUmuvtbGA6YLYo38Eppecibp3ldE3sr7uVlk3tei6+rGcA/mtmJ6aUXi3eaGO9HWVm44C/xzngpjaPdacBT6eU/h7lmLH4u6ZuLWuAq8wbbPdJKT3dZgzwRiqLMn0jpfRE8UbNeGfh8zbOzH4EfA34exvnAfDl9GhK6R9RjmEppf+Lv9vZpvpfSkk/HfwA8wAPA2NL084EDou/5wPW7cL/GQvcGH8vBUzCL76+C9wGjKoRawfglsq0XYDfAKcAQ2qWbXbg18DTeIvgVjF9VPwenBlnBeB54GDgHmBdvLK9dM3yLA88F/NyLnBSm8t8duBxYI54fRJwM3Bp/D13jVjjgNfi7+HAFvjFxcnAqjXLtShwHbAbcAZwJHAYfpKrE2dr4KYoz6bAn/Ak1UTgKWC5zDjLxnqbEb/IuQnYpo3lPQq4D1g8Xp8HXA9cG9vVkjXW/8N4haaYtiB+MvhBzfW2LDCptN6uxy/CjgLWrxFnOfxkDbBNzNuINpbRIrGf3oxflBxS3l+B4TX3kydLr7+It3qeTI1jVizbl+LvwcBe+HHpdGC9mvO3HPCD0uu78YrlN4A9gOk/4OV9VPz/z+I9Xk4EtgXGFNtHjVhLAS8V5QCOiPV4WcSds8Y28GL8PR2e9HwCOA5YrJ+X9Rp4b73F6y7rUqy/AFfEvrZZ5f3cc8nU3Lbva3PbXgp4rvT62Vj/58XvJT7o5R0xPo0ncM4DPgnM0EGsjYHHS683BPbHzykz1YizCX7unQs/L9yPX8ifTb1j3KbAY/i5aW3gHLxh5nRgZBvz9j38XH4HcBUwXRvLaMtY3mfgye6FwR/h3h/LupvLO2JdiR/TfhrLaXTd+QNWAp4tvZ4Q6+xiah7HgQ3w+vLcw
KzAV4E/4PW4GWvEmT6Wx+t4Xamoj68JDMvdFoBPxbJeAT8eHQBsDnyJzGNuKdbm+LF/PWB3vJ40Vxvb0mLEMSVeH4k3el0DfPyDXtYRawiewPsN8HO8zlvUnefCk8U5cYpjwDx4nfkYYFf8fFnnmLRZKc6CwEXA+LrLOmJ9DHim9PrLeMPFVcB87cQcCD+65aFDKaW3gH2BP5cmXwYsHX+fgW98nXoTmGxm++AV5LNTSoenlDbAD/yfqBHrDuCPZrZmMSGldDF+gJsVv5DOlrx3xGF4hfQ7wDZmdhFwp5mNSPmZtr2AM1JKpwD34pX584D9zGwnyO6CdQxwekrpYLxy/HEze79rqGXeahDzNQlvmVwar8gdhFd2Z8MrrbneA143s+VSSv9KKd2Gz9vvgbMts+ub+W0XvwJ+G2W4BK/oHgksaWbz1CjT7fjB+lb8wH1ISumhlNJE/IJ87cw4OwFXp5T+nlL6C7589jWz4VHm8TlB4rsPAcubj30wD35hcxjwMzKXd0rpebxCtIf5rQrDU0qvpZT2wU++y2bOF3iF76r4e2O8i/HtQAK+EdtFjt3xihb4SWkU3iK8WI2ygO+fh6aUtoqybQg8bWZbx/vHmdl6mbF2AlY1s/nMbGX8JHsd8E/gRDNbKjPODMBvzGyx2Nevw3srvQh83czGZMYB2AqvxGBmWwKr4JW4n8R7uevuS3RneV8IHJFSugFv6fwt8HHg82Z2DH6cyrUm3qq8iplNwLelo/D9ZX68kpFjbuBNM5srpfRvvGXpD/g2ebGZzZp5nOzWsp5AYx/5MTAZuCV6d9T1ceBqfJ9/ANjRzM4375EHsKXFrRkt7Eh3tu3hTLlt/5v2tu0V8caGokfAUGBvPJn0a/pneYMnJb6Cr/djgSPMbGmLW86irLl2AV4z7+K8K36+nBFvWLnCSt2pW1gUuD2l9Ns4L5yNXygPxY9buT4LnJy8pXU5/ML513iPkyejtTnXnsCJKaVfRhmG4beuDINatx0ui19oPYkfv3fHezzMHXE2jd+t9uFuLWvo0vKOso+LeVocr6usmOKqqYb/AG9FzD2iDPdGeZ4ollWm2YEfJe/N95eU0kn48TYBJ1jmrazJe0och9cpx+C9X/+K15Wmj2NxjrXweu5z+L67A74PrgCcZPXGezoAOD6l9EBK6QLgHeALxZs1znd/Av5hZsuZ2Y74uWAC8AJwnUUX/wyz0YVlDX5bCY0k/qp4gupn+IX3DCm/V8BKwGVxrTYzfm4biq/Dayx/HJrPA99OKb2VUnotynNE8Wb5eirDX4G/mN+GsRuerJgI/BF4xswWqRFr4OjvjMa08EMp+4pvqCPwVtXj8C5b3fo/C+EViZvwHa1oJbsF2CUzxhC8UrsXnm3fFa80DY/3nwC2bqNsi+EJhRXxg8ozwC/xHW72jO8PwrsP3Yif3Cbjmdyx+AHyCjKyiXh30suARUvTDgIuir9XB/bPXaf4weexWOZfLr2/D3BhzWV0AJ5w2QqYuTT9FGCHmrHmAy6Iv8/BW4S/jlee6sQZBSyDj/txLtHCCTwCfC5zva1KpXUNb8Ubgbd4vVCjPGvjt+E8BBxZmj4BuCHj+3PF73F49v80/ML7kzH9BeCzNcozU2lb2BpYp/TeCTX2uylaH/EeRhPJzLSXvjdL+Tt46+ukmLdf1Iy1K/A2nvDarDT9JGDHGnGOwFtKL8VbWw+K6aeWl1lmrKHxey1g7dL0w4HDM2Ms3Ony7u1z+DHkELwSVqsnDt6Cczte0d6kNP0g4NQacU7FK7bH4Mf/fWP6GWT2LKos67U7WNazNZl2KJ7sHFNn+cR354zf0+NJliPx3go34beG5MbZA08AvQds3sG2PRE/p1wWy7qTbXtQ/F4aWL40fXfg8swYo7u1vPFk9HgavfAWws8D9+M9cQ4GHsqMNQw/Fp2I109+Q5yH8ePoFeT3MlsNT5ZsjTfIPIEndJfCE06jWnzfojwrx99D8fPjwqXPnEtG62t8f8ZiGyrtM6vhdZ6sc3fEGYT3SJgtpo3Bk2dX4XWyU4neRy3iTIefE78R32l7Wcd3VseTZFu1s7xLcc4BPl96PQF4ovR602IeWu0nxX6K15vGl947E1izxrzNEvOwfWX6rLHcc7fJwbHcvwNMiGkP4Lec/ZHMHksxT2/jY1f9o/j/+K2d15LZ2yjKsj9+jCzqKJsC98ff21HvnLItnuA+juhlHNO/Xsxv5rK+FvhiJ8u6tJ0PxnumHB3T7sd7+b1abEsZcTbBe5UcAbyLJ7iK5XcZsFJGOYbhvUGWoXEMnwO/zjE8KXRX7rzF9w+In8Mo9ZqL7X7zOrEGyk+/F2Ba/cEP8u8Ba3UYZ1H8In1zvOvYXPiF0v2x418CPNhm7HXxpMLt+MX8BZS6PbURbwKeNV80DrKb4q3wn8j8/qxxMLoKuLXy3lOtdvzSZ2eg1J0xltnt8fcDwGdqztcn8JbKP+Bdk4bjXdZaXnDH94eV/t4Tv8g9Ar+IH4xfCLa8MCEqMqXXh+MXSa/HgXx+YIGMOF/EW6LuB1YpTT8ltqd7yLh4r8ScLn4PKW3/n8V7P7S8gAcWLP09L15p+E/M39x4BafPOPgJerfS61H4SfV0/CL3MWokgYBFWrz/UM62VF0npWW0fMQ4N163qmR9NdazleOU3v93zrKOz25eeb0zXukuTpYPZ87bF+L3XHhr2xHAxqX3n6JUOWkRa2J1G6+8/0DmtrRn5XUxT9nLO5bHaXhibZUm7x8OPF9jW1qFnheQG+EXzEXZvkteAm8MnhSeDb9gPgHYqfT+E9V1285P7rJu8j0rlfMi4Moa3216OwOemByPt+p8PiNOdb/4YmzbRdlyt+2Z4vcssW0f1O62Xd0Wm0y/L2f99xavneXdIu7qePLkP/igpnW+Ow6/0DmoMv0H1LtFaGs8uXwh8PXS9KcpnS9axBhe+nvWynuTqNFlmSZd//E6zk/w83qfiUp6NjwNr7y3NH7O/DuZ9aX43hL4RcmBnSzr+M72MS8XtLO88STAxyklz2PaU8DI2A+z65d4wuZHeOPS0cVyi2ktbxPFj5NFcnJdvNv892hcwA+L7WuFjPkqJ/AXxC/6tsdb48EbvVassf53xXsFnUHpNrXc9RZlLxoUpy9NH47XoRfBe8Cs3iLO9HiP56F4I9XZeN32VrzeOwa/gP9k5jobgp/rftbOsq7EKo6/M+LXOrsAP4xpW9bZvuPzE/CB88t13udo47YFetZzN8DrCdm3hsR3F8DrpT+PfW4+/BrohVbrbaD+9HsBptUfvLvX8R3GMPxgfh1+//ezeIV9MJ45Xjc25oUyYx1Jk1Z+/MJvD/yA/7F2yhm/Z8EvSl/HB2KBaPmoGW86PCt6Pt5NeGfg4czvlXsmGHExH8vvQeDajDifxS8YbiZaEeMAvX2si2spnXBbxNoNvzj+FbA+fsD/ON7yc2cciE6rEeeXwBYxbYGYp31qLNvR+Al5M/yEth/eu2TRKNtGeI+DlusNT0J9Gz/BVivxG+AJtTsz4hyBP41hV3pWRoru
z2fgt2PkzNfY0ra4MDFeQuwz85B5fzBeSTu4j/f3wQdi7TTOUpnr/wjgvsq0UaW/VwOuyZy3r+ItWINpchGH38J1X0acw4hEXS/vH0xmDy28xf+92OfniH13cOn9AzLLVMQ5BK/EGD0rgy2Xd2xLL8Y2vD/e3XZ5ShVG/AIza2yXWA4PAT+kSe+oKOv9GXG+jFf03gE2bPL+4fggT63i7IwnFB+L+RtLKZFTY1nvjFf0ijjz0nM8j/nwLqI5y2hXvHL2XMznAvRMxO5GqaUzI87z+PFtTOX9fTLnbR+8x83P8MGL16Z0D3DEzt22N6Jn74/3L1BonJdbxoptt9cLOzyhnLW84/Or4hfCawKzxLQhNM7nx9PH/t0k1l4Rq2jBH1V6fyIZPTBKcdYqTSsv9xOoNDb0Emfv2DZPBZZp8v7Xc8rTJNYU+zzeOtly/CI8sX1eZbsuJzyOA+7OiHM0leQrpV5Cucu6EqtcppFtLO8hxXqicbE1qPQ/voAfA1drEecoKr0P8N43r+H1gCuJhHBGmc6J/bg8b1+NWFfi9a9LMuKcjZ8DyuejffBbjSfklCW+c0Ysh/L5aFs8efut2D6uy4x1Jp48Lh9vB5fm8U9k9E7A628v0TOpsQXesPQtvL7bsq6LHw9PjXhF41Kx3rKXdSnWNyPegjFtR/x2tR1rLO/dYp/atzRtI7zn01V4XePqjDhbxPreOdZfeTvYDq9vnJNZptXxxMY6xLUW8JnYN07Dry2+lTuPA+2n3wswLf9Qsytzk+8fQgyeiLfSLIF3t3qIUmtXZqxTYsP/A/C1LszbTLFzzl6Z/ingrC7EGYFfsD6Nd3fts/KOXwTchCddXiaykDROanvG/E9RuajEmQO/MN0Nz4jeSGnAzfjMzJnzNhrPNi4fB57j8C6YHyt9Znb6aJXtJc4xePZ4SUqtLmQMfISfZE+IvzeJ7eFbeJfJc6kxICeepf8tfsLYCa8oz1+UCT8ZLZUR57N4QuFovAKzHn6rxMrxfsvBjqLsxb6yYGwLN8S6nFhzvkbjFzbzlLaJtYnsOn4f3j606L7XKk7NfeRZGrej7I5Xcs7EL1ZH4C3Ws2TEmgVPShWJluUjxqejvENju281b6PwzPq88Xpd/CL1S0RPIrx7YE7L1mh8P18HP6nuWN6m4399nhZdQSPOs3gF/2p6dnPvkVhoEefrxDEMTwL+Ab8QmIwff7MHYov18nO819SCsU98HU/GfA5Pgn6cUiK0lziz48e1OfBkxjfxBOfWNLqrr0aLQRkjziv4sePjUbZnaCQph2cu697ifKrOtl2K9RbeC2E9/JjbY2DX+D99Js57iXNLEQdPoOVs27MBv8OPZaviPQIvxpOvxYX30pnb9pyx3dzHlK2RVpq3PmNFnNfx8/hnKJ0vqbSgZi7z5fHW0MvwbvtHVt4fFvtRzu2K1VgTK++PxZPzfQ46WolzD3BM5f3ReNKszzLhYxQ8F/vHefg5cx4a55OiN0dO0rzPWPGZ6WhxTMD3/5uB/8PHTfpqTJ+79P46VHpRNIkzMvazX+PHui8V2378Xij2nZYDvFZiTSJ6dtE4nsyD18FytoEz6OViD68TvkfcotlHjKVjWRct0+Vky8z4sXgMGQMXRqxXaCS3FsVbzhfCz3Gr4wnPPmPh57CXiTof3ltyIfxYu2aN/W1Z/EK2WLaz4Oe12fEk3GV4D6icc/jSeH3wbrxuPH9MLxI568by7nNwxpi35/EL91fp2ftqeJRtOC3qTjQGQf9C7CPb4A1wtZZ1k1jn4se6xfDjcJ89RitxygO83xvbzlyx/uajcSvsyBZxVsSPSQfiic7H8LrF+vH+nHgPnCluQ+sl1iT8fHIifh13DJ6Inz62kZG0MdDrQPnp9wLop4+V4weab1amDcEvjk+oEWcG/AQ6Dq+8PItnDj9d+sxOtLiwrcS8Ee9SdSh+Ipyu9F5R6WoZr0mccjZ5FvxkPSojzoU0LpT3i4NtuYvZIGDLjDjnAMfG39PhJ8pvl97PHoUXv+A7Kf5eA+/OeCKeEb41N1Yvcb6Bn+huInMU5tgO9iJa6fFeG1+Ov2fGL8Bb3jJRircEfs/8LvhtM5filZPiYNvyaQqxXmaP+Tgg1t1peKXroMxyzIhXtM/As7/PxT4yEm/VuzJnGyrFO5do6cUvKu7FLyR/T2MQzJyTY19xLitv6xmxvoX3uJgd+D6eANgGv9Cp00W2r6fF3IonJ1penMT2cnX8vUgs86/i++FNRKIhs0xn44+sJebrt3hFNvt4VFreE+PvnfFeQROokUyI764dsT6BVyaKfaToTpo9oj5ekbkt/l4A77b/JfzC6BoyEm7x3b2I1qvY7/6JJ98eiH2vZUU0vrs7pVYZ/P7b+/DK9wlUeoa0Gecb+H6d+0SGTaj0ZMLv5X4WP6Zk7ScZcQbnxMKTvndXpq2NVySvp95TBg7En8K0OF55vBNPJs1a+l85CdOJeGV2D/x8cAR+viyO5X1eiDaJdwdxARjb09OUkvax/rJG5m8VK6a3TMJnxsmtU+wWf+9Joxv/9URrK5lPCsDPib3FOiZ32yzN0474cfdZPAn3TxrjKeQu77Xx8+Rq+HG7uI9/VLyfdSzIjJVznlsGP04WF7Tz4Bfvnyx95jpaJ4Lev80Kr/uegvfq+ho9e7vkNJxMIFqN8YTGQ3ivvNvwuqblxIplc3z8vQn+iM7b8HriTjE953h5FNGQh/d8uwZvRDmBuH2D/IT3XTRuNTyf0thepc/MkxHnXmC7+HsfvP7UbDycVsvoLGD3+HsPvH57BY1bqQflxOkj1uV4neJY/BieUz+5qLR+9sNvv7iDeGJTjfKcDuwaf8+IJ/LPwOsHRW/YKZZZL7EuIMZbwRsGrort6MQ6y3sg//R7AfTTx8rxA/MjeEVtodL0Gal5zw5+MTKm9Ho7vDXn7jhAZT9aEb8oeRa/kDwydrr9aLQAzEhGpauPOEUL5/TkVbYWjjjTx+tB+Alk63g9JxmPiIpyH0bp3iz85F+Mv7AYXsHIqdQMxS+OijIdD+xdeu8WMgZPw5Man6bRnbAa5+acOOVyxe8hVE5ksZ1tWXMbPQLYNv6+C2+NO576j/r8JI1Wm3Pwrsan4Ce6nBPICLzicAFxAVd670EyH/EYy3sv/MR2DH6BVLQCzYRXJMfWiHNOL3Guo8bjgfAu5U/hCY29StO/gl+Q51ZGDD8Z7oOfoMv3319N5TF9LWJ9E090nEJpcEq8krNrZowReCtCufvuyjGfK9coy6x4S0u5O+Kn8QRHy8pVJdao+N5psZ6WLb33XTLu469sBxfhCZeHKfUOi+V2DHkVm3nxytBJ+IXWfqX3rs3db/HWmUvwCv/8eBfVL+HH2gvIT07OhyfFmsU5PzdOxBoeZdqJnl23p8eTgbmPU+xWnCGxzo6l0nMklnX2mAL4uWiR+D0L3nPi3IhzOJ7Yb/k4NbxVcoGIs25sN2fg587P4y2MuetuLH5BM1dp2mnEgL6xTretsR1c2yLWFz6oMuHHgaNoXDDej9/eNxxvKf0O+Rc
AswJHlV5XY11BfiLP8GPdnTQGJnwIf0Tq/ZQecZwRZwx+HFozpj2GJ2EvJvNYR2OAyN5iXUTmAJ94Auf0+HsdvB5wA34ReFRM73M54ce3/8MvQufA66WfwweGvDrmLftCK+brGrwOfQ4+wO/0eO+lS8loUY44H8PrarPFdr52bPNfwI9/WY9CjP9/G56Ivxs/Diwf2+rl5CdNl6N0C0rEfQ0//s5AfkJ4fkq3V8Q83YpfdGcnJ+P/fSm25ZXwnl2bxbJeJfaR7MdY4onq3mJdTl7Pm+Jxk80GeN8Ov5DP7WX8pWKbjNdX4knBY/A6Ym7SfDBeV76YRoPrZXiS8nJqDqY+UH/6vQD6abJS/CBYvs/qJDxT9228y84GeKY856K9z5aGOOj+mXot79MRtw7EAfLzeOX4m/jgJy8TF70t4sxA4z6i3uK0HB8A7wr5KUrZS/zetCKzfA+wc+a8TU+j1WdwlPER/AR1KXBAjeVUlGUQjRGYi2l3UH+E+KFEpbhunNhmluvj/XWBx9rYVhfFK7Yzxza6Dn5BkTN43gylv0fE8l0fb7lZBe/utmaLGPPirX+LRozhlHpZ4BWAlvdeN9kGVsVPStdWlvf95F+8DcNbb44iBrmsG4eeF0Zbxf76Oo1eINdTukcws1wdPS2mNA+z4z1ursVP3ovH9BuJ5EnNcg3DKwPT0bjYykoExfeLezeLe0lHxLb4EhkDuuLH3cXwnlzFU282iHW1On7xlr0tEZXWiLs6Xpm5ksax4Foyxz/BK26fxitcJ+GJpOI4dTMZCZzSsj0aryRdhSdMRsT7D1AabbrVNoAnue6O+Wg3TjFo2sYR5yB8fy6mP0JldPZe4hTrfJOIc2CbccbF7yXwC9qJUbZFYvr9OftIX+XEj1kb4/dgfzXjO0V39nLvvVH4+fJofMT4Wrcx4pXr8mBuiwI3x9/fo8bAxTE/HcfqZplKMT5eef0kTQZZzYy1WqexYrv6Gt4I8kqsx3Mp9Rbt47vlHpfr4I0vC+HJ949FnA3amK91Yl+uHQs/xp4bPzvg3fo3xHsHfgI/3tXpMfEtvLv+Y6VpQ/GL8azxuErHgqPwsXAep2eD3JPU69V3In68vJee40s8Rr1BPb+N9y66ubIuHyL/yQ5Go0dLMZ8L4heqU4yr0yLWoEqcQTSS1nV6Kxted7sQT9iV63VPUy8Ba/iFdkex8GTgNfg5rtrAVGeA9+F4g80L+Hnl3tJ7N1Iv+TIm9pN78UTLQzF9RjypkH0NNlB/+r0A+mmyUrxCtnZl2qKxczwcO9uOmbEOiQ34WHzQxcUr719BxmMUS59/v9JeOSgugldw7qPGYwKbxK8Vh0bXvJlK0wbjt3Zci1fCWz4FI5bvSniFtDoq/wHEqLWZ87AxfVwI4SfcxzPiLItfYH+Z0n2/pRNBbhzDKy8fr0wr4ozFW05atk7TeOrIFsQjuPBeHe8B59dYz5tTaXGi8Xifb2fGMBrdNIuu9nuXttFZ4r3cxwutTynpgrdKjC29Xo+8geGKbWljGhfrs7cR59P0fFTpCPzewG/gAzLdBFyfOW9deVpMzNN6pdeL4xdv10eMG4B7Msu0HH7Bvh9N7mvHLy5bbgsRo/wEhUGV94+nxSMQK9vSRbFs94/p+8T+czqV43KLdbdfk+lH45WH24BHMuIUx4DdaVzsjsIrfvvgvTJaJgKpHJPwC5vZaSQBtiJGLM8sz66xDU2PV4hG1olT+uyRNPbXlfEE0DV4xetGMo5vTeKujFdKr8Arg1lx8BbRM2n0Klscv43nHPwi4L6cfSS+O4KeCQCrvL8V8JOMOFvHMpqxlzj7Aa+1sYzKDRZDYzu/KJZZ7uCuE6hUzPGk4CD8WJAdqxtlws+Tn6GX1t4ob8tBDyvfseoyrxMLH+D20zQGh54eP+6+R2XcihrlGYT3cjsNbww6tNm20UeMPfFEe3H+H4EfR/5Co1t+rW7XsZ1eD1xcmf4kLXqa4Y1HK5Rez0PPcaZ2pM3Hr8d8Ponf/rY9Xh9+ION7WxI902Kd7YCPx/RorM+9yDh+V2IuhteVXsDPAevgt+ZlNeTgda7q47mHxM9eeGIx69GV1W2bRlJhSfzCPadX0Z70rAsMxc/Zd+HjYu1F+0+gG0Jj4PKtc2PFfrlq6fX0eG+CC/DeHFkDvFdizoIn2dagcavaHmQMgFwtUyzfDYgeE6VYWY/mHeg//V4A/VRWiG/w77eKxkFoI6J7NBldfkqxDsIz+6vgWfFT8Gzr5hF7Zmq0kscB6BT6uP8Qf87rFhlxdsErQ4fTpNtYjTjn00vmOg7+/0eLzC09n6ZxBn6P+jdptADOh4+a27LVLWK9SM+L9xlo3GowL5617/MiN+K8gFdwT8EHTfwOjQGLsuLEZycSI+ziLQf745WRE/CLzEFkPBaol+V0HF6J3JpGpanl47PwAXNWK02bNaYvT2WU6BbzVdxXvhieYLkAz5YvhieWNm41X6Uy9Ui6lMuAn1RuzFxv1SezfIPGRUqdOM8Wywg/wc5Lz4Gq5ifvnvBmZar9tJhm23bpvTnxlpJVyeji2Mv2fQU9E4NjaHFbSKv1Fn8vS+tBBqvb0gb4xcxFeCvFMPK7NzZbd4tEnFF4xfDTtH4saXkZnUrjGDAcr9yciSc7c56jXT0mDaKxv84Y20OdY9KpeAX2Qno+3qtlnFKs14sy4T0nRsc2uVBsk5sRScsWcU4lki2V99aJ9bh5ZpxflMozM175m5F4HC9+W1bOuDCGJyB2xFuie4zlE783A9apuYyG4kmOcqvd7rQ4T5Y+OxOeWJq98j+KMp1IxsDF8dk58P1ufGV6cWHyjZxY3SpTbDuv03hM3RBi0Lt4PQ7v1dlyQFz8GD2RngNolm+lWhj4aatYTco0NLbzxeg54Gir89xIPIE3X2X6x+h5S0bOrVOj8dtjigGGi/W1KqUxi3JiNYk9gtKtG7H995lcju3oeSKRX10WsS5+lrnephjgGz+ezIwfc7+D3w7V6ljQo0yl6YvijXJ34Re7rQbAnY4pH1U6B77v74/3nD2JjFux4ns/rJap8pkdaH2em2Jbqq5r/DjXat56246mw3viTcITsX0OgNvX9h3v7Y7fLnhuq1hUjkk0kiUj8Wuh3AHeey1PvD8n3rDbcmD8apmavD8X3hO71kDdA/Wn3wugn9LK8J3x/2jcY3c4fhHwPJ7RzM4+xve/QambIH7xcADeupU1GFgl3kTiOdc0LvrKXclmocXj/eJzJ+CtPbvGfP2ByLLXjDMRuDT+HoZXtFcqvb8tLUYWjs81e5rGpXg32U/ile+sZ4TjI7hfH3/PjGdHL8J7S2wU05fNiLMXpUd2xUHuO3gFZcsacUbgXb6Lloei1e7LeJKpztgZvT115EEaT9Vo+TSFWCZ3x98z4MmJa/GuaJ+L6Tn3AW5FPJ86Xg/FL2onAl9pY9uuJl1Oj211PF7J7fORV30so8tiW/pEjTjH0RjQcXjsJ7fi9xVmDVaZsd5qPS2ml237Uvwe16wu7pnb91YdrrfTYr1ldWtssS0dS+nYlB
mr2bq7Bb83OftWkF6W0ZX4hW+dLrs5x6ScQfN6K88vaRyTcu9NXY/GYJVz4Um2m2MZ7Vlj3ooL15fxLs7ZYzdU4qxY2kdG47367ontse76Pwr4I16hPhnveZV9H3GLZXQDNQasrcTrdQDkeH8O8ns+ngKcEn8vjB9jrsMvaobHMpzwQZUJvwWoGITvY/hx6Tp81PnNY3puN+dr8TrXLXiL8hQNKOSde5uV6Xr8/FuM75RzvrweT96fiV/QlhNKRc+VOk+w+Xb8vQTecHJdrL/3E1cZcaZIulTeH4MnMVslXc4gBqXDE3c74nWUdWLagmTcPtnLttTWaPmVMo3DkxAXU3qUaWacm4jexvRyXCR/3JM+l1ONMvW1LfXoddzGdnQTXqdcKqbnPqK71zKVPpMzllqzY9INeIJjAfwaYWTN8uxQWUZFkqLPhEsvZVqkVKb3n6BD6XHUH/afQciAYGaWUvo33lXwEjO7Es+q7pZSWh5v8drHzGaqEfY54Fgz2xQgpfQSfpH0M+B4MxtRo3zz4Dv7MTHpy2Z2OXCimV1sZmNTSn9OKZ3cIs4IPIu8X0rpopTSBLwL7WZmdq+ZzZ8ZZwyevTwrJh2Ct5adaGYPm9lSeHepgzNm7xm8kk5K6a8ppVfwx0YWFe73UkrXtwpiZjPimf7XzGwsfhIaid9/dz9+0UJK6UcZZXoceNvMZo7vvJNS+iKeENogN05K6a/4RdYKZvYInsHeO6V0Bn4xsKyZLZ1RHmi+nHbCT+Sbx/T/9hXAzIbjLe2/NLO18YvR/8Mvwo4HPmNmM6WU/pdRnh8BW5nZ5bH9/Sel9Bp+Abdp7nzFNvlZPOMOfiJZJOb1b3hSLqWUnsoI12wZ7YpvSxvnxDGzocB/gV+b2W54xfZt/MS4GbBRbP+5eltvN+Dz3VIf2/YteELp82ZmNcr0JL1v3+ua2ZCMMhXr7RcxqVhvr+Hrbesa5XkJ35auqGxLlwAbxvGkpT7W3fb4PvLZGuuu2TFge/xCZXszG5xRntxj0rsZ5XkC+E2T8uwPbGBmgzLjgO+7fzezufDK8c9SSlsBm+LrYVzGvE0H/A9v8d0Sf3zl02a2Xbw/2MwON7NhGeX5GfCemX0SP688lVLaGN/ftjSzFXJmKtbJT/FkwLHAO3hiexczW97M5jOzm81shoxwzzPlMvosfv7cMGcZlco1Fr8QuRhPlm0B7Glm4+P9mYC/ppQuywz5LL7swRN4KaZtChyRUpqcUrr8AyzTg6W/D4iynIUvww0BUkrPtgpiZqPxJMZBMV/zAVeY2Q7x/gxmtmbmOby3Mr2Ad7/OOV/OjV+cnxjfXwU4oajTAcnMFk4pvZdRHvBem8Vx4xj8qUy34z0Ei2PBfzLinIuvrxPN7LBqXTKl9Bu82/xzLeL8FH/yDXjvwnnwpPkJZnZwSum1lNINrQrTy7a0h5mtFO+PiLpHjnKZzseX/5/xeT08J4CZLYpfaD+LNwweZWYfK+rvxfJKKf2rjTJVl9NXM8vU27a0cXxkGN47LEez7egm/NyyM0BK6R+dlsnMZjKzheLaqJVmx6Tv47cX7JhS+r+U0js1y7MyPZfR9FGen2aUp1qmb0eZnsGPk/sBpJR+kBlr4OvvjIZ+pvzBs6IP0/NxhTPhG+LImrF2wjfkvSh1G8RPtNmx8ErR4/iOsDOerFgNvw/6PDK7XUasr9KkpQ7v/rV1Zox18PuQ98AP2M/h3UFH4AeDrFHmI9a8eAvy/TR/msbqGTGKzOXKMR+3A9+vrL97yWidjs+PxC9m7mPKkcYfJFoWW8QYQqkFJNbb50uv58cr9lmZfLr01BG81WGvmL/vV967kxY9ceL/FS0zI/Hs+N14dnw43sX9Z7nzFXE2xBMj3wOeLU2fL9Zby6dW4L0P5mqxjPrclkrb0Rz4hcNpTZbRTdR4Ggc+Rsb32l1vlW37FPwi+Zkm2/aqmeWZPn4uw1uBF6m8/yAZtynF73XxFoVHKvtbsd5adZMsd0cfhbcu39XhtjQH3ip9aifrLtbPRbHeOllGq8Y+0vZ6w3tpzYjvs/fQ/JiUNSgYjdu/joxYZ9Dz9qfryHySBn7snrP0eiv8mHYTfjw/rcZ62xpPsp2Nn++KbveXktdKXizvGeh56854fD8+HT/vXpRZHovt8N5OllF8vtVAyq+QMZByKd6YWMZfpudo8XPj576WTwgo9q9Oy4Rf2IzAe3DdSGkQtnjv4ZxtvPSdUTTOL/PiI/oX43r8nIyBmeP/jmxRpqwBHfHz9FD8gm81fMya8/HE4k9o8tjAPmLNEmX6JnBTafoIfAyZnC7qc+C929bBH+94bsQsHok3Ay0GUy7FWjyW7dZE79eYvmhMz328Z84A31nbN43xypqV6eqcMsVni57GY/H99wE8sTRDLK86A592XKbStjSkybZ0EJ60yNqW8PF3butkO6qUabpetu86ZRqDn+OaHZNuI/8JKL2Vp9YyiljztChT9uO1Pww//V4A/cSKaNL1jZ6DOl0CHJsZa+XYGVfBT17bxsH1fLwCdzNwVhtl3BrvBfA8pQH18MFZWj7Wh8Z9Vp/EK33n0PPexM/ilflW9xQWJ/uP48mER4jn6cb0bfCTeMt7+Suvj8JbKk/DLwzrPE1jOnpevK9O6f79OEj9kBYXJnGwLz9C73h8sKSzY71uj1e0cp4PXVwcndrL+w+T0QWbLj11BH/02eql16tRGrUXTzQ8lxHnQvwi62y8ojUcr9hcEuW6Gtij7vYdsbejg6RL6XsnRllOrbMt0bjYHlH6/+UBqhbDn/mds02Orqy3U0rbd531Nm8lzmbAJnW37fjs8vS8V/cbeOtPre0bvxiesfR6l3bWG41Ba0+I/70afmvK5V3YlhZoZ93hFeIRNLpEnhDL6Nyay2iOynr7VDvrDb9VrrzfFuvsvDrlie8uQ2lEe3zMhT8TI53j54DnMuat1bH9Inzcm1ZxZoz9rVjWn8HPb4/gSaG94nXOvBX7btPHEuLd53+fsf6H4+eTIqFxBH4OeLjOMirF69pAyvixoDg2fTq2gefwwQdnju3hh5mxplgObZapuP1yCF75/wWeZJwf724+KSdOi21kFvyWrJdqlmm6KNOrdctEL7f94cfu8XhC58XM8oyiMWbK8jQeZfxN/DixGvB8jWUyG9GNn/aTLkV59ol5eRzvWTAYT4Jml6eX+HUH+DYaddQDo0yPtVsmphwPoqijvJi7HZW3J7xOfxPeO6BWmfDEwxRPJMDrJtnbUvy/heLv5Trdjnr5H7XKVKy7+L0hfux/njaOSd0qT7HO4vcmdHCc/DD99HsB9PP+hfGeeIWzehAahLeSPVZ9r5dYm+GDj5yAj2xbPPN0Nrwytzs1HsUW3x1f+nsEnvGdmUaF5yZg94w4e9MY9X4OPLHxO7zHwmdiHnNaJM6i0bIxQyy/mUvv35xZngPwruhH4MmS0fhFwNV4BbfO0zROiu+cjic5yr1BhuHZ6ZyL92Pis6cSAxzhg8BcgN8zdzoZLVP483PviYP+E3h361XwE8v0eJLgtMx56
8pTR/Cunk2fJR5lup0WF29xQL4L78VzKz6C7pZ4a37xSLbs+6jxHjwX4Sfr5Zq8/1Dmemv23UXxFs+H4n/kLKOz8JbVc4h7NmmcLKfHWyaz7jHHWwpuIe4hjmmL4S2u2dt3s/XfzrYdn/8+fovLHqVpc5S279Myt+8iqXQOTY6LZCTLmHLQ2pPxBF7RsjSOej0Teu01FOvu+px1hyczboz9a1xMmw1/pNp3aiyjYv1vWppWbEtDYzvL2ban2G/x88DJdcoT3/sRPuZBeVC62fFk90Wx/HMeOVscuw/FE8jjKu+fR95FTdED5ILYpsbHstkPv0f8MEpJmBaxzoplfhale5tpnCe/QcbYB/gx/vYoU3HxPiuewL0YP7fn3lfelYGUS7Gup3TPbyyv4/CE4P14q1ufyyviHBPrbyJNGiJyyhRx9sVvLTkrtqMZ8F5LJ+PJsrPJfJQifqFeTrhUGxxeoUXvIhpPhflWlGtYrLv1Y71llSni3IJfhM7Uy2f+1Ko8pVjnEY+xjWmL4LfifRtPcl5JxgDGsWyn6K1HjaRLlOcg/CJ0+5i2GX68Ox/vufYg+YO7djzAdy+xN49t9Gz8nJlbpg1ifooeQUMr7/+D0jm5xbwdT897+D8TZTo/t0wR5x766ImYsy1FnFfj/4+KacU1xSn4QIy525Hh4wjsgZ97m6233DLtjPcA+CqewBuLH5OKulTLY1LE2g5PJH2pus5yy1Mq0x74sW23mLZ6rMtaZfqw/fR7AfSTwO+x/X5scJvTpLtgs2m9xHqGePxfHAjPwO9Xrv2IooixAz5Q1mk0GTwEH2ug5eNT8BP/Y/F3uVfCYhH7a+Q9k3t3vEL6HE1GTsVby3IeC/TlOMgui7fkv4afXIvBBVuO5l2KdQB+AbMonqk/FK+ofhk/0c4OfD0jzl541/3lYpk8X17v9FKxaBJnOrxr1orx+rKI+wBeIc1OKNGlp47gj2T7brH+8UrXsTGfS+Gtcvu0iDEIbwXZIF6fj7cu34TfP1/r+fBRhvviYH9J7IeH0riIG0veYwv3iu3nHJqMRk5p1OkWcfbGLyQWjG1qEj1vUZiB/BPa9PhJ6zL8AvXCmJ9iEL7c7n+9rf8lYzvL2rbj+7vglaDl8IrQdpX3cwf0KyeVbokyfga/HWsOvEXh9Iw41UFrl8YrFJdQ6lmQWaYD8ePSS1QepRW/pyfvou1LsQ3Mgh+7J+KtbJ8lWtPbXP8Xxfr/DN4Vey7gmIxY1f12X/zC6JhY1i2fMlJZ/5fhXaWvp3L7F/nddqvH7lfxC7h1S/tJy8HKYl4ewhPJq+Pnstti3qaj3rPYq/vus8CClc+MyoizO37b1zgiMY63uq0c72edA0rxJtKFgZTjsycCV5W257mIHmZ4C+pclEb5b1GmG/CW+vPxY8h6lTLl1AW+jidYl8YTcP8C9i2936O3X4tYJ8Q2uTGVATTxpMAQ4uI3Y97uwPfZS6kMDhxxcnqXLYyPA3NnxFyiWHel8uR24Z9IY/DqovdL+THUMxINTy3iGF4XfIrG7QVDKp/JSbocHctoN/wCaxCNc+7C+Lkl6/hL9wb4PqTZ52JZrR5lWi5zm7w75q8YjG+60vvL0EuP0SaxNsfPKb+h54DBhj8+eomc5YSf574Tf08f3/sijR4iQ4G9MuIcHfN2HnByafrgiDFDznZUWk63xDxejTcs7lN6f0hmmYr1vxuezPo9kbSP7Tr3mHQ0fk45DD8mbV16b2iUp8/6aSXWbXi98NJYLnOmxv6XVaYP40+/F+Cj/oN3Yfxm/D4Ez/AdSWOU203IHAUUWAk4I/4eGgeireMA8gRwbhvlWwc/qR0WO8kh+C0LRWZ53Vbli53xURqPUdsBrwBehV/U5T6ObTjeLW5evDL4HeIeJBqtQCvS4gkW+AnsKkoj0+MXthdEzD4fVdck3sHAzqXX8+Ct5meSOfo9Xkl4np5PqbiEUqtnTrnwk81swBfj9Wi8VbDo0rtPrMeRGbG69tQRvBt5cYI9Aq8A7hYxz6kRZ0+8ReUE4M+l6Z/CWwfnz4wzGD85fqI07WL8ouAa8pMAQ/HkzX74ReVd+EVOuTUo537iocSgRKVppwM7lV73+UioJjHXj/W0SmyjT+HHhNwkUF/r/2JqjDAdsV4lnipB4+Ktz0cnNonTLKl0D55UOqO8vDJifRavAG9aif9VvPdLbgJvMH4xsgZ+bJsc21b50Vw547AMo5QoxRPCD+O9n06hVKnsYP3/urxtZsTobb89Cq9Y5h67iyTncvH65NhXsh+DXFo/vR27LyUzURbf25VSLwa84rcsfj6eUCNOzr67XOb6/1lp/V+Et2ZdjScrd84tU3x/Hrz7/6Lx+sDYHm6I/XdsjVjD8PNRsd+dF7Gew2+byh0/ZVa8y3fxDPbH8cTJ9/GkbO4YQ6PwhOvCpWlX4Qm9R6lRYcfPkW/HNnQx3rixOpkXR6U4Y2L9FY9R/QTeA6roIl4n+TYET+BMxI9rd+J1kz4fw9gkztz4Mb9oXDgCr2M+jDcwtBwbqBTrqNgWD6bSuxE/Bg6mlx6IlWX9Co3bJZ7Aj90PxbaefY7De0k9V/4OfjvH43hDQ9Y2gNcn/x7r7he00Zsh4owlbiHD62H34beAnIBfZM6K12Fzn+wwC34t8JlYX7+IeC2fXFZZLyfSeKrIJbGNT8J7nuXWT8fGsi4SiU/jx8miB1XWeSA+OyvwYxp10rnx8+cLeC/FkR2s/xXw89wdNdb/PLHui+T/F/EGjzniddYTVEqxflxaLk/GvN0QMbPHlvgw/vR7AT7qP/i9wAuWXi8XO+qF+EXzX4BPZsZ6fzAoPIlQbn0bg3dVynqUSyXuuXiry5p4V/w/AGdnfrfoYntM7OyGXxBug7e8XEZ+l9KTgYvj7/nwCuT5NeelKM+++MlxTTyT+ThemTwS+GbNmOvjFfVyUmEwfnHxXfKeXT4Sb42ahUZL0p7AhfH3oeR14Z2p8tooVbJjG3m61UG7tJzOw7PHV+KVvuJAuQ/1LrrG4PclHx/rbYaYPieeqc6tSBbLdW/84naF0ns/JONiqTRvh+O9JBbHLwq+G3+fQl7X5CLOlvitMiPx0bvPwE9om+KZ+MMyYg3Be7fMTyM5tjXxmD48oXd0jTIVv4+n0fXyxZjH+2k98GXx/QtarP8+t6NSvLmZskfCyfixYI3y/8wo0170nVRaIKdM8Z1uDVr7cUoXaHgy8V28Ev51MpNmNBLJo/AKabGfrIxfVCyYW6Y+1v8D5A+g2Gq/XTkzzlJE98/StPPwBN7C8brVuAjFfrE/XTh24wn4V+nZojkYbzm/P2dZ48fXofg4RX3tu0dlxBpC4/Gbs+PHs5Hx+tOx/rMTMHRxIOWI94XYlufEk0HF2Az7k5nsimV1RiyfnfAkQjEm0gFkjhMVnz8RH4TT8PrA7XjC6STqJbvHlpb7eLyOcwl+TBiNH/taJnNiezyiMu0M4FPx90l1ljlehzsp/l4bH2vkUbxLfW5vnvXx4/SeNB+8OqtXH37R/aP43ix4IuBmom5D5kUX
jccDr4jfFvDLWP4zxraV1QpciteNAb43J44B+MXkW3giqOgVsitNesM2iXMYMDH+XhnvzbEufl66msyBKuP7xbluZ+D4Utn+iV+wzlEj1qZ4EmEZ4K7S9J3JTAoTDSal18vhF/9Z55FKrCHxf1eN13PgjQSjY3rLZV2KdUgv6//kGuv/01Qenx7zVtRLDiU/8bIVjR4SS8c+swhe7z6JjFuxP8w//V6Aj/IPPQffq95ntQJ+kXppZqzReKV2eC/xJ5I5unST2OOAE+LvPfAM8+X4BV1utnU3PAN5Jj0HZds5DnY5z2NenZ5jJcxJ457VWWhxQRLfsdJ3J+InxXuAb8T0BeJ1reeZ48mRC+NAVm71fpEmg+H0Eac8EOc4PNO+ciy7cRnffwRvRZw1XlfH5Mge3LNSjodp46kjlW1w0dhufg0cHNNmoHJrRy9xFsAfxVbuqvl5vPJ3LN7KcUrN+Vo8tr0r8Rbl02P6SnhWuU6LUrFdDcYrSJvgFbn/o8ZFID0H9pwFv2hbH7+4yFn/M1Refzb2j6/jFxczxrSmA8f1sewfotQqlbv+e9kGy9vEV6iZwIvv7YBX+K+j5z3dP6RFUokuDlqL9wYqujMOpucgiLPhLRTv5ay7vtYlfiH2Q1pUJJnyPFKs/4kdrP/FYr99g+gWnLvf9hKv6Go7E96SPzHze0WyfERszzfR4bEbb9G8IbajoifODPio8C0r7fQ8H5Xvv5+17r7bJPaI0t/T4Um07PUW3+toIOVKrHnjOHAP3uhRDGC3MvEYucw4E+LzJxMJ85j+idhG+4xTrN+YtxfxhpJbaFx4fQnv3VNn3kZU1t9WeDLgZuBNMhtiYht8f/BLPGlyAZ7wmETmQJqleBPxnk+7xfa+HZ6sbllfKsVYAe918T3aGLw6PrstPce9GYkfV8q9cHKTCofEMr2AUvd/PDH/3ZztqLTtdTTAd2W9lY/dJwN/jBi/Ir+eW2ybG1JKHsU816qflL57Cn7c/mKsr2Nj+eXezjMC7+10PZ4Unj+mL4Q3EmRvS5W4u+G9X2r1nIrv7o4nyG6O7frk0nKaWCPOWl1a/wvjychhpXIchzeETMpd/5WYy1LquYXfPnhTbpk+jD/9XoCP6g/eYvNu5SBdPqANw+8Hmisj1vJ4xvh6vBWjfM/WIDybmFVB6iX+9Hjl42C829VaeFax5WMC6dk6vjVeGXmDxlgFF9Dm+A7x/cXiYNny/sb4/I54dnwQ3go4Bk8uFCeoa4mKSUaspSPeaniF7+P4YEO34BcStxC3oLSIsz7eArh2aVpRntNieR2YEWeDOLieRWnQs3hvKD5C76M5BzS69NQRvItleaT7OfFWhaK19DvEBUEfMTbFK5pH4q3l99Bo1dwTv0fwcDIrkHjLz+fxSsToWH8L0mh9vSZz3rbEE2RNu/7jrZwnZsRZLZZT+YkX5V4U75LROyU+fwNTDqC3Hz7ydrHPtUyU4CfDPSi1qtAz2VHnqTMH4xX02UrTiu17HJ6g3Dcjzpb4BegnS9OKx4NlJZXo8qC1eKLtdXr2MjMavYzOAr6VEWdk6e9BVCrVeItJ7v62TGXaV6KMxX34Oet/afzCb/V4PTfegpy935ZirRzfLT/2sFj/S+EJxpYtpbGsb8Qv+EfHtjMHja6qWcfu2N+OpDHo6Wx4K+QT+HH75px1Ft+9l14q1PE/svZd/MLoaHx8kkWptEDjPfnqtN53ZSDl0rawCt5de7bYFv9VLMPYHlo+CYXo8l1sg/hF0pX4mBhzxrLMGUz5SGCx0rrbDO9qXOxzj+SUpxTPaBxvq8nPP5M3kOrnqjHj9xyxXf0wc97Gxr5WtI7PjZ/T/03j9s7cp3ssEvvXSBpjAYwolS138Orqtljsu1vgPUyOpEbyJr5b3N/+/v6D111zt8luDfBd9LhrNjDoEDwZnDNw7WK9TC+W1U1Uemj1EWtpvK5WjFO1HX5L1TulbaBlz1C8QaL8eO1v4V37j8XPc/eTN07BfrF/NRuocGP8uJv1+EP8mD03fk1RJLcXL+27D2duk+UxV+bAG29+iycC6qz/OfD9f1Rl+lL4ceRJMo8l+HF7GSq3ktHGMffD+tPvBfio/uBd2F7Du3z9sthBaNzrNgel7twtYt1ZHBjwrPp2sZNvEtPWpd5z6xfDL/5PolHhnh/vUph1q0N8Z/PYKcutOItG3FfwRMC9tO7qulwc1C7Gx3RYmJ7ZyC9EvFZdphfBTxCn4tne4t7GGfBWhUWivDkX3J/C79U6BL9v8yG8y2PRJfDz5A2Yszl+gXMUXnlZKg62xUl/FzyhkFOmx/ED/LrxnX0q789VPdj1EqcrTx3BW+kfrUwrJyU2Iu92kMeIrnV4S89P8AukWt0jS2V6Ek/UPB7rvXyhvDReAWw1b1vg2f2jY1kvRM+LwlnxSk6rOJvjLf0n4wnEb9HzgnQT/IIrZ/1vATxZej0Pvt+uQ+Meypw4W+M9YopHX16JJ72KbXKFnPVfWl//wFsMD6VxnJuHevddFst7It46swieEJwTv/3iRPxiutUxoGuD1uKtUA/iLRln0uieXlQgRsQ8t9oG1scvlLendLEQ28EgfECwe9rd3/Du85vXWP/VY9uj9HxqwQZkDlpLI4FzPH6ee//Cr/SZnAHhimTp2UyZMBuEJwQfqbm/TaaSgIr/k1s53qyyvy2GJ6tXjjJtSMa+i7dcvRjb9j34LYY7xr4zKOI9mLPuIl5XBlKOz+6E7+t30HO8iSVi3zmQysCDvcTZNrbx54D9S9NXwhs7bgXOzIizH/C9+Lu4WJuz9P5awI2Z87YpPROm7/csiNfzATdnxNkTeLgyrXyeu7woc8Z6K57ucRFeT5oTP95uX42bsd7uJ8b0afL+F8kYvDo+ezWRMKGUfInXy+L1zyVaxBhPk/vH8XrwK/i5+NLM8nRrgO9t8HPbJPw4/XlKDXh4Ei1nwPFd8d5DB1Ia06P0/peK8maut8diO9i38j92zN0GSvvbDyr729L4rRk7kJfE3bdadkrHazyx++XMedsZryffgSclTylvN/j56/aMOO/vb5X1vxx+fjgsc/0Xjzq9Ofa1vSmNu4Dfcvh4jfV2f+wLhzd5fw8qx4hp8affC/BR/okNeiH8Hp638C5WLXeoSowxcdIoMq2v45W3ffHuSO1cdD2JZzGvxLtKLo2fSNai0e00p2L6HI37B+fEM+SL0ciYL0Tes+tfx7s3n4wnSk7GKwPlLsFZj3fDT4634if5k+JAfQGNVq7cbmTfpTFA1Sx4pflJMu6VrcR5lkZX229GuZ7HL5BGxvSWFVy8Unp56fXaeGv1J+N1ne6fXXnqCH6vX/FUga1j/n5NZstmfG8mvDV86dK0M6Nct5N5D3fpu0/TuLi6BK/E307pSQXkdeN/gHg8Ep4EuAJPLJSTcDm34DxTWkZz412vqy1CuffLHkmj0rErXqF4BU96LFWjTLeW5m0wXlF6BDgupk2fs/5L8U7BT9xHxzZQjPBcvj2rVSKgt+V9JJm3FNHlQWtjH9kgttG7ab9L689oHI+
+TZP7v8noct3H/nZY6TM56796bHuKOCe0MW/VBM5pEe+4mnGqydIperSQcexusr9dn7t/NYn1ZaIVjMbTGb4X67CY55EZcS6ncUwaitcLLqPUu4nMQWLjsx0PpByfmw7v2bRI7CO3xv52PPDpGuUZjh+HlomyPYEfC8o9MmdutW3ix6K7aJzT9sZ7AD2M39YxEk8ytrwtJMr0b+A/EXPe0nvlcvXZGyDiTKIx2OwGsdyPwRsDhuK3cvTZOBTL+jW8TjQmtq3vx/pbv/S5nP13ON4jYolYd7fgvZT2xc8LQ/FjYZ+DV0es3fCGju9R6UGHJ7umozRWVx/l+WWsp6bHa7yxI2f/7dYA39PhCcVl8YTuwXhD0+E0er8sScagwXgr+wN4IuM8vC62In68KsZGajloKc33t8Px8+WBNHr3tEpQNtvfjmn1vcxlfRJ+7j2azKcyxXeH4Q0Bi+O9cFbAGwBuw5Onhvde6HNQ3Zi3J2iMC7QyPobLl8hsgC0t61/ijZPL4Mfsp/AekEVPvs3I64XdbH87iMb+NgRvpJviCWDT2k+/F+Cj/BMHsnvi70XjAPcOfp9Tj2x5HzFmwitqV+PdUCeV3lshDnB1Hn91IKUMP34CyMpAVuKMAW4ovb4fr6jeQeZj5uJ7qwL3ll7PFWW8nsYzXutcLC+BZzhnxk/4b+EZ4eXJf3TSMPyera1odJO/DG/Zu5MWJ9hSrCVpjKA+HL/AWTcOuDcAV9eYr0VoXMhOFwexI/CTeJ2Rk7vy1JGYtxfiAL0efiG/WWyTj5B5j3rE+hp+wt4ZbxUr9pn9ychEl+KsTQziGevxbTxzPB5P7OQMnmixrs6O7WhlvGfBinhvgGti3eVcAC5FDNYX66t4WkDR22grMp+hHp/fDL/wWjJ+zxf7y8n4SW6KZz1X5y1+HxfLeY7Se+PwfbhWwiy+O39sQ7PgvXheje3h87QeO6PY385ssrzH4QmY68l7nGLXBq3FLxJOL70uWshPJHoYkXcv8Oz4frokjVumzscTurPhXdX7HDm9ND8/Ysr9bUW8UpjT+lss67Pp8NgW31uJxrgk5QTO4rF95g7s2ypZOjhzWS9FJI3ofX+rc7vLGvjF5KrE4Lv4eWUPvKKcNd4BfrF3Fz2fWDAPfkFxPjW7k8f32x5IuRRjPHBr/D0T3qX8IDy5fwWlJxK1iHMo8di6eL0Q3ipYdMdenBbnXhrHpoPxLs3F4M5r43WnC6lxLzfeiPOt+Pss4K+xnIr/sxG9dGOvxNkHv3gbFfvgc/hF4L6xjJbLLM8C9Kx3TYcnmibENlBnXJBDiScB4A05f4zl9iX8fJ7b87W4eJs/yvEYjdvm6vQuOxCvh4zHj+O34y26RU+uxcjrwt/NAb4XLbbt0rRP4hfM2U+dKu0bN+C3W0zAExxvUWMsgBb7WzHeSbf2tyVokaDMWNYX5S7riDMzfl1Tvu1xFjzpkv3EGbwn76/j79nxetupscyvrBFnBeCWSvmujX235bmyyfIuBk9ve3+bFn76vQAftR/gY/jJaul4XdxffH1sgIOIARAzYh2MZz/H4hdXa1Cq8Ee8O2qUbXAcQMqZ6DUpddXBLzZzehUMxVvtvoJn7E+M6cviJ6bPZpZpHrxC/GVK9znhvSV+TP4jNYsD5Cz4wX99vAL/M/yC62oyxqsoxdsWvyfqCPxEdkdM3wK/pz/3fv5ipOy56NkSMRyvwI+qUaZm4x5MjOXXsnIUn+/mU0cWifX2c+CS0vT58AvBnIvuYr1tjSdxdivKh5/8s+51js/PTWTA8YuAXUrvrYT3WOhz2y6V51N4q8gVwEOVz9xPXivZDLF8y2OebIxf/E+Hdzf/RKs4lZhfw/fhHo++xLsa5nZTnwfvtbMdfqFdtIyMiW0++0kxxHgA+EXqBLzF5Cd4Jekc8isAW3eyvOnyoLX0vD+1GMhpPH4c2bjmOhtSKeem+HHlIuB/xL3+Gct5Qfwisu39LT6/A34+OpzOjm0zFOsFv2hsK4GDV4iLHnhDaT9ZOiOeVCj3bCvvbz9uY3/bG79Iuoeex80nyHzaSOwfJ+PnylVL8zo44tbuQUF3BlKeCb8QeSP2sSI5ZHgL5Wnk9VRcjsZYHMX57p5YFzPh56fc59dvircGnknPnjcb4Of13FsCZiZ6FZSW1/34heDB+CNOc27Hmx1v3bwnyrV/6b1vxHLKaRiaLrbDu2L/O5FIosU6yx5NP8pUDMq8PT1H5z8Ob/HOKdPJxG0Isd8dTWUA7cw4s+K3DwzFz8GfxY/ll+L1qBeoN0hsNwb4ng7vwXcpPZ/uMyLWwZY197dlicYN/KL3F/g574Sa22Q39rflieMYjXN3u/vbl2JZn9Xusi5951T81q5Nm0z/eua8zR3r7Yf48Xr/0vZ5MRnjncTnZ8STvmfhx5RjaCTh7iAzeROfn4UYCLqT/W1a+On3AnyUfmJneBqvlL+IZwoXxru4P18z1px4l53i4moo3sr1KN499QC84t7yPv74fnGhNBY/uRbZ48Gx8y4YB8o6rebzxcHiXErZ2jjI5tx3WdxH/nH8pL0jfvtFUYE/F9g1I84gfEyK4nujI94LNLqm5mbth9IYJXdjPDu5NY3nap9ERksQXmHYp7cDDX6Bc0lGnCXxk/OZ+EXkzpX3Z47lndMC0JWnjuAVsi3j71H4xXr53sSJxOM/M+brHDzLWz0JLYHfCpN7kVyet/e37dK0o8l4BGll3hbDL26Owk9K8+Hdga/K3Ud6+R+X4cmNlgN6xufL9zSugFcEv49n7XeK1y3vK4794pOxja+Bt5BegLfmbY7futDynuJeYg+P7fBlGrdB5XQnLc/bIrG8j66zvJnKg9ZW/teOeBfhlq03+MVj09tjYh08TN4xoEiWFBdrK9GzC3fL/a1JzM/hx7YtaQwElnVsi88u2mSehlTK1DKBg1eAF6ZJpZz6ydLZaJ50rbu/rUojUTIv3mL/KH5BeyiegL0hM9acsU2Pje+di48TsC9+jspuEKjEbXsg5fj++9sl3mtuC/z8Mi6mn0uTe4Uz4hb1jFPw1tezyBi4thJjY7yO8w4xBkKsv6yej8Ci5fLQ89ywIj7OUsvb8srbN34MOYGevbrOI6PXWyXmHrHff5lICONJypYDzMVnp9gXKuU8N7dMeJ1vptLrWfBzQq1H8zaJOxQ/d34B+BuZT/mhywN840mFo/HEzw6lbfsaavbKxZMmN0a5foY3Gn6C/McNFvXHYn/bEk+4trW/MeXYV53sb5/rZFlT6mWJX3DfjjdQLRfTrqTyuNVW84TfPn0JpSc64Qn4nMd9F/W/ufHGzu/hPQKLpzVdS/7jVPtsPKizv00LP/1egI/SD36CPyL+Pjx2gNPwruXFffS5rQe708ger0qje+SVeMvmwWS0bJXizYhnZ8v3qhc73m5R9km0fiTbzLGjrhmvt4wd/xE8C3gAGdl/POP7NH4xMBhvLT0fP/h/C2+J/SWtu0x/DM84X4BfZBVZ5EOA22quv2XwC5LLKD3uKt4zvBWu5UVuxPkN3mK0UJM4S+
EtSi2fOR7L9WC8J8kW+K0B91MaW4AWXd3jM1156kgs78n4CWN4NRZekXi51bxV5uvTeCXrARontKUoDajVxrxVB5Z6idaP5Cvm7SJ6XpRugfcAuDLK2WofGYMnRIZVppefNPJuqzjx2SXwCvDeNBJmQ/FK9954Qu8IWvR0oXHRfQMxjgte4domYlwR6zSn50Uxf8W4JMX9tgdX95vMeduXnoOcbVZzeXdt0NrqvPWyPW1Ki1ZlPPHzRHUbKL0/Mz6wXqtjSXm93cGUFcklM/e30fRx7sGTOS2PbfHZFfFeaM1GBS8GdW2ZwIn97aHY9k5v8v4IMpKlsQxuwI+3V9DoHVhsn9fU2N+arje8W/iW+LlpF1o/Tndp/ALkFPz2j+1i+pp4QuES/HyZ25LYlYGU43vrAEc3mX507Es34F2NW93LvQR+bDyWSvIwylvc+tQqztJ40uY7NHqYDcMTua/iiZcbWsWJ722JN5A07RlDY9DfVmX6GF4HOxu/7WJ45f2N8S7iufN2BU3OrXjPi5ZxKvPW9PYv/Lg5KTNWbw0d08c2lvPkkqXxhPb5NDn/xLz9MrM83Rrge0n8Vrtv4T2fVsDPLyfh55JbM7ftYt7OLeYNP+49QY1zXGm9PEKlnobfQnFHjf1tOfzYcSGVwctjWeXub8vH/76x2Cbj+9+KfePCnGVdWm/vxX5S9JZYGq+TvIqfh1sOOFuat8vxW/pGVt5fL3MZLY8nfa+l+VOnNsUbeOvM2xk06dkasbL2t2nlp98L8FH5wVsynqBxX+pP8BP0Z2JHnaLC1CLeWBpddE7HKzELxIHkeGp0S44Y1+OZ8Nvx+wA3Kb23HN71Nuckcg1+8X4PjcGvpsfvvZ8QB6q1MuLcgrdCfLU0bSx+MbBLLLumj+urxLkvDkRz4VnNZ2P+lip9JqvbFl6ZOhC/QL0Iz0LvR+Ne3tGUulH2EedW/BaVw/GT4YrlsuD3iefM25x4a0G5a/twvIJyFRktwKXvdeWpI3iXva/ilYjniS7JNC4q16DS26CP+Zqtl/mq1TrS17zhyaoJwBfbmLfFSu/Nj7fojMyI8+NY77vgybdqS+7y5N8StAR+Yn4Z73FTPM3hEzR6+OScHO+gcdF9ViyTS2nyqL+MWC/1Nn80WtFzBuCqztuWMX0l/Fi3cKvlTZcHrW0xb0WlJKfL7V00Hpe2GH4sOYGej8Rs2WOCnsmSM/EWoGtoJEtWbbW/xedexnsgbV2aVrQmD8Zb4HJvL7uNxvg28+OPadydOH5QeUZ7H3HuxhPQi8U+txGeUF6z9JmcZOm9eOvcyvhF6Fcr769QY3+7s7Leiu6yq9Tcju7Fe6jNFPvaP/H6wXLV5Z8ZrysDKcfnHqXRwGHE8RY/v60f21ifo/rH55/Dk5qn4PcVX0DpnFTeB1rE+X5sB5fjj3pdq/TeELznSe68PU2ph1Ssvw1ptJZ+goxENd7DdAJ+YfQifm4oBjIdHOt2uxrzdkXM29qV9z9N/rgAvc4bnhw+iLzxWIpkyRl4gmsMPRsXNoz/lfNEnX3xC9NjmfLRfNvllKe0LXVjgO+f4o1RF+J1wa/hDTxj8J65G5A34HR13ook3no0Egy522R13tbCL+DXini5+9vrTDl4+WY0bs27m7z97Vn8loar8V4289A4bw/PXdbx+Xvw2ybOoXKMLcVq2YjaZN5Oje17ePx8jbw6XDFv1+LJoPlj+ykaY3am8gjYuvOGHzOnx/frrO17Wvnp9wJ8lH5oVGpnpuf9fyNjZ18oM04xMNylcTC6m1KLGF4hyOpqFZ/fEq+QDI+dawe8e9MdNFoEWna3wrtGPYC3HE2IHW0X/B6iOuMTbI1XuIrbQa4is6WmEmcWvHK7cmX6zrGM1qgRaz3gvtLrN/FWpIPxi52taizrB0qvv4YnJ1q2jvUSbyJwXmXaSDxJVfdiqaOnjuD3Qz5cev0NmrR01Zivy3qZrylGeM8s24Idztsjnc4bfvFyN55IvBVP5G1Oowv1gmRcJFVifhq/R3V9vDVrMvDdGt+fl9K92jQuuvfHW8n3rxFrWWLwvsr8zRHvL0zmUxni81s1mbf7a3y/a4PW4hdpd/Sx7hYirwfHKniCaw0aI9cXAzH+gPxjyZyxrEdW1tuX8WRJ1n6C3y7xAH67xguxfZaTnGPJHNE71tPPaXTVvQ2/B/y0WHbjM+MsSM/j7at4BfAI/JywTWacdSk9si+2z6doJDrnJb9y/An8oq+63o7Dz8NbZ8YZE/tbORl1HJ4kv57Mx1aWvtuVgZTjuwcDz5ZeXxjbxiu5yzy+9wVKt2vgdYMf4hfQxRMtcnqE7E/PwdN2pf1zyijgytLr78XyvhA/r2Q9SQM/h99Zev3JiHUbNS4gmszbbrQx6G3GvH0zZ1mXvltOlryAt5BvUPlMq5bgrxC3x+G9Pl6kUV/dsea8dWuA7w3peUwZhzfGZY/p1ce8XY4nWbL3kV7m7bt4I9hteONF7ng1fQ1e/qWY1vK8iyedbo2/p8ePu9fjPSha3qbcJFaxnHbEG1Kynw6TOW+75myPfczbdTFv+3c4by9Sc9yNafGn3wugn/crPU+3+d3tYsf4Pp5Z/kzdWPgJ8qL4u3xf+TepNxr/DUSGHT8Z/RjPrp6KV75GZsZ5lJ6t/hcBO5XerzMQzM54RbTaJXEv4Gs14ixJ49FQG1PqRoonYI7OjLNWKc5gvLJ+LZ6cyO4aFet63Yh3O95yumPp/QMojYyeGbOjp47gGePVSvO2Et7qlj1qLj7Y4Rp4l9ZL8JbTXTuZr/jeZr3M2+WZ87Yh8ein+Pz4uvMW3x1K6T5mPMP9EN66vDRe6c4ePTlirAQ8GH8vht+a8hp+m1LLpx/E98oX3eULihXwxGDuRfcixABs8XqPyvz9IGf+iMfI9TFvl7SaN7o/aO16LdZd7rx9Dr+APAGvQJYvBncm/97UpWK93dDLestKluC9jzYuvT4ylvFZ+K1w95E/EOvH8B4Sh8b2dF1pfR5HYxCtnMHc7oz1fBHw/dL0z+Bj0OTEWIq4H5ZGj52ziWQrfl7KSizjCaNj8STwffS8GMxeb/H5S4knqcR3H4np55DZQhaf7+ZAyobfCvi92F+uiGU1HD+2/xxYMrNcq8R6K+4N/3xsV5vF9pHzVA7Dk2NrleZ1GeCZ0mfOo97j667D6yOHEo9BxXs7XUf09MmIsQTeMv3JKNPn8AvTNfBu8zlJxUF478a1+5i382vO2/V9zFvuYHVb0iJZQov6V8zL12g8fvEYGhdym+H11exH6NG9Ab4XwBuqNqPnwKxb4OfyBTNiFPO2aJN5+1TMW9Y+Upq3u5rM23L4rVC5reVzx3rqbfDynN6lhvdMKdbbUcBN8fcaeK/qrEFr8Z5Dx9NzoNrtYv9YN2c7Kn1vDB0OzB7ztkeLecs9DwzGz90Llaa1NW/T2s8gpF+Z2Qz4SWBimyGuwbt034ZXbJfBM5t1PAnMZGb7p5T+V5p+NDCnma3U15fNDcUvsh82s+F4RWmdlNKV+IHyT3jrZKs4w4Fvp5QeNbOhK
aXJ+AH3EDPbFSCl9N9WM2RmC5rZmvgF2mjgV2a2T+kjxUVhS2Y2Du918W6sr0fw2xUKI/CL1FZx5gf+DrxjZjOllP6XUnoDT27MCnzbzGbNiDM33n10X7z3x77xc6CZPW5mR+NdLk/LiPUxM9vIzJZOKd0PPGRmW+GV/yPwXh5vR1lTH3GWwtfxu2Y2JD7/LH5BN5+Z7RXbSF9lGYFXqM/A94dj8IuHvc3sMTM7Lne+It7mZrYOQErpzpi3z+EnumLe3sqYt02BP6SUno5Y/0spTcJPIlnzFnH2wffP75uZRazz8d4Pb+AnpD+mlO7OiWVmq0aMZ4GLzGwHfL0dlVJaEPhFSumfGXGWw5f3s/h29d3SR1bCLwr+k1Gmr+AXEj8qpqWUzsO3gTdj/v7Sav7M7GB8JPBhpXn7PL7ejo55e7WveTOzOfHu1k/HPnY23itlPTN70swOwJNTh/cWoxLvADyB8HRp3s7Hx5j4VY15OwCvwB6Pt1L/hJ7H/hnxxEmr8hyIV4qOojFGzAOlj6yE9y7rc72Z2fr4bXKzmtnMMV/H4hcShj9Ob7qU0qMZZVofr7Q/jN8iNxN+DiGl9C7+6MKl4nVf+9sGZvbZmJ9/4xcTd5U+Mif+iMVeY0SchfDj/DNmtljpvHEPME/8j/dSSo9lzNtC+Lq5GUjEiP6lj+Sut4XNbAU8UVI8YWJhfHsAv+1kxVZxIpbF+fpC4A0zK+pzjwOzxDnwKLwL978zYiX82HtQlOmTeM/Ef6WU7sLX6zI5ZcMTwQYcY2bn4S2Kd8dxeBDe6tinKM/1+KCSxXH3ReA/ZraSmR2BN1K8m1km8B4O/8K3w9njXPU63hV+jswYr+Hrfxt8gO098ST3Y3gvoT7rFbGs38OTgD/vY95GtJq34jwSdgL+gyc3q/M2V+a8/RT4t5l90swGx/d+iCeldjezWfuqf5W2ydNSSj+N+tyT+PG8OA/fTxwHcsQxbA/8wnI+fP2RUvoRnlCYPzPO63h9eUNgVTObLeqYt+GJgFX6+n5p3s5PKf3MzIZV5u32mLeP1Zy3ffDlPB9+6xMppR/iF9HzZcZ5G090jAM+bWZLm9mwlNIj+DJqeUyJ/e0S4GdxLLkVr5sQ2/Y9+DhhOeX5L3BSSuk1MxsSk6/Ge5XsZmazZdbjB6WUfoNfQyzQy7ytkFmm82KbHEQkyCrz1vL4HZ//H3BMSukXnczbNKm/Mxof5R+8srM4bXZPbBKv1uAf9LwvbmW8K+mLeOZvKH6C/Skt7t+iZ6+G4j7ikZXPPE+L0biptFrQc1Tw9fELnS1o3Zo8N36geRRv/V8Tz2Y/hycDzsQrPMtlLKO58Qra9/AWs+0r78+On3CXzYzzCF45/kJ5HeAnoZvI6wZaHtzzKBpdNvfCu+JvSl7WtitPHYk438cr2z8inixC4969YtChXm9bKdYp3mpzBF5JuxNPlFyEX8ytT/7TOEbgF7GT8B4qS+G387wIPFlj3kbg3cd/gF+ULhvTi3sKP91q3uJzc8a+tEBp2mB63qr0buY2OSd+ITquNG0BPFtfZ70VZSpn2ofjlfjn8dbKrCfF0HjqzLhi3eO9Fcojqv+t1fxFnNdpPEllBvy+1j8BT9WYt64NWhtlejWW7+3AfDG9PDp8y3UXcX6BH3tuxJOIM5a2pVliuS+bWZ5X8IrMnLEtXR7r89Cc9Ybvtz+M/eNufOyO6ek5mvo7rcpTiXUefhwYh997W9wyNzN+/G01b0Wc8/Fj9+J4QvgxvFfALjGPreKMie/cgCcgr8OPLfPg3cNfwhNBS2XMWxHrRvwe3ovx1vaF4/3ZMtdb0dJ2E54I/jZ+vB5TivNL8seq6MpAypVYHytNK8edJ9ZLq4E0Z8RvTVskXm+IJ07Gx+t58e0/6/aCXv7HVjQGqqt9qyA+psdE/Lj9nViXPyHziUERYzh+PlqxNK8L4ceIVoPEztBkvRXnv1rzhu+vI4nH3OEJn7bnDU8CHY7vf7fiSaRi7JNLaPH4Snw/n4uePbnKt/aMiX0lZ3yYbg3wPRc9xwA4CD/efR2vM32JvMG052LKwVjbnbdymQbjt/legvd4O7bGvM1HozfvEDx5fwE+bkmdwcvnp9JCH9uClf7P8+QNFN4jVmUZDcXPWQ9Vl2WTOIvj9dsZY962iXk7uea8LR77wQxdmLfF8frpFLel1pm3afWnWKDSj0qtAx/0/z0Tv8g6MqX0u5j2Zfwg+0P8xPdaSmn/jDjD8VsIJse0wSl6O5jZqfgtB3u1EWco8L+U0nvR8v5WSumiFnEuBX6eUjrBzD6F9wAZn1L6S/Ra+CfeEvyLvuI0ibUJXgn8VPJMp+EHtZlSSoe1G6f0mYVTSq+2iDMvXjleP6X0DzP7CZ51fxm/cPpbSunoVvMVsS4GXk8pHWdmh+MXpX/DL6BfSindZWbDU0r/qhlnfvyA/Qu8VeoFMxufvFW/VZnG4yfDQ/H1dAleqdglpXRv5nxZSimZ2X54peT3eHfEB/GWsm1TSo+3mrde4myOt0osB+yeUnrHzFZMKf2gRZkuBn6aUjrZzBbHT47L4RXHO/GWpdVTShdmzF811rZ4Rf0V4McppXtrrLcizlJ4192F8HvMl8Yff/V88paAVmXaHb/dZSfzHk2H471xRuKJwEfwW0YuqBnnCHzMi7mAV1JKh2fO21j8uLa7mZ2O3wv8EL6Nrwscn1L6R6v5iliXAM+llM6KY9RrKaVvx3uD8ETcqimli2vEOTvifCveG4wfd+dOKX2tZpxfpJROjff2wo+hk1KLXgVmdg7wRkrpG9FzYl18/38H7xr8J3z8mVP6itMk1lfwXnPv4ImWe/GE6VwppT1rxlkLX//v4C2/PwQeSt7S2VecsyLOSWa2DF4BfQP4dUrp67EM/5dS2i1j3sqxlsOPT6/jCcsT8OTV2IxzQDnO8pU4J+KJ78VSSme2KlPEux74L94TZF48yXx3vLccngT9SrGtZsYqLuIOTyndF+9Nhyfh30jeeyUnzkg8cXN4cfyIFt1vAn9NKR3RIs5W+MB3bydvzS+mG35+eQlvJTwpY96miGXeG25p/MJ+HH6rwfOZcX6XKi2QcRz4JvCPjHm7GF9ng/Ht+RT8+J/amLeLaVxw/SD2m1H4LZp15m16/DbDuVNKPzGzj+O9g95JKb1q3kPnNvwWjcl9xLkGP4bMg4/tdHvpvVnwuthrrbajUqx38VvWTo/z2vT4sWU0fk54ptX5yczux4/7B6SU7ohpi+CNU8PxxO7jKaWbM+N8OVV6osX2dBpeD8qZt/vxdXNAcSwz71X3CTxJNAa/zavVvN2Hj53xndK0GfFlNCZ+Hk0pPVQnTvmaJNbbiXjd+5iMeZuiTJX3R+DHuWdbxHkYuD6ldG5p2lC8gWo0vo19L2PeesQxsxEppb+2OW/VWDOnUi8iMxuJN6L0OW/TrL6yDfqZdn/wHglv4y1KP6IyngBekRtJi3sdm8Q5uPL+x/AWzz7vBcyIUyS/Wt2/Nw+eVR9bmnYWce803gq0QV8xWsQ6kxhQE28Z3IjW
mdZWccbllik+39vgnqOIwSwzYnTlqSN9xPl0xKk1xkDE2IXGCL6v4dn622jxzN8mccbHcl8ltsOn8GTJZzqM8wx+kZM7QvVc+MXZt+L1rXhiY/tYRqfWKEtvsXaKWKd1GGdHvGKcPb5IfL/ZU2fG4hep55M50GQvcYokwIVkPL0GujdoLZ5geYbG/ffr4sm7Y6r/s0txWh3feotzQs31NQRv5TshXr+Et9ytjrfYnU3+KOWtYp2GXzy1mrfe4qyJX7QflVMmGuMKlMfLOBW/FfBq4vGx5I0r0CrW3pnrrVWcYkyH3GW+JV0YSLmXWBPwJMcdeHJhGBmDqTWJs1PEuYtoaSVj4Gngi/jj2K7AE7gjm3wm99hbjTWKFvtqZpwpeqRlbk/b4rfyzIdfPF5HpScR0XuxjVjXk9HDrZdYF0dZbsZ7b5VblQfh55bjWsSoDsp9VmwDpxCDn9aYt24N8D0Yrz/8HO+d+GAsr8/h9bfcsYGqcb4b87QrngSas8a8VWMV6/ALZDxhorKMygPOfgE/xpxMRmt7H3E+j/dyORuvuw4m46ljLWKdRY3BZvHecuU4x8Qyv5mMXmV9xJmI15Hvo9HbdO0uxGprv5vWfvq9APrppxXvF/CHxt9r4hdsTxED3MQBr+VjB3uJ8yRRAYkDbs6AMK3ifDWnPPHZpSk9mxzvlnhV/H0b3rKcu5z6inUHMeBXh3FurVOmPv7HutQYkJPuPXWktzgjIk7LBEcl3uJ4BeIHxAVyblmaxConJ17H73u+lfrJiWqSY3/8/uesOHil43r8Pv7HS9NH4ffvjatRlr5i3U3mYzX7iDMLfqLMjdPqAv575D0GtVWch6nx9Jr4TkeD1uIXucXFUJHUXAo/juwwwOJMqLlsFsdbSO8BnihNH4zfnpX9KMQWsZ6k8qSdNuIMwROXq2XGKW4huxxPRDwX08fhvZ7e7/LaYayL8QuunAEi+4pzEZmPZY3vbEkXBlLuZqwuxjkRT5DsgO/zZ+HJ3KHxfvagtX3EKrqbZyWXM8qU+2SWW/HehcXrI4FzS6+ztu/MWOtlxqkmJm6gcvsOeY/2621Q7m/iSaU623dXBviO7w/Bb3eaGb+V8g08OZT9WO1uxulWrFhGl+L1rINjuewVy+g2SufONuN8M+LUGQS9VayZMuPMiycQFsRve70F721xFN7wuHSHcY6IOC0H4qwRK3swzmn1p98LoJ9+XPk9xygYimeCH8RbmV/tUpzXuhDn5Zrlqd6zNQK/H/c4ajxOr5uxulmmXuLPgPfs2KgLsWolJqZGHPxe0ltpPB+4dqtSfK8ryYkuxvkEPZ+j3sky6kqsLpep46fOdDnOILzXw+F4YuPrZDxnvpdYRmOMmG3wBGzW/e4DPM6ceC+Cb+A9V9ag9FSF/ojVjTj4LUX74Mnx4j7wzSk9DeODjtXFOHPg54/9K9NnxCv1K3UxVu6jPlvF6XPbjO15UOzvxWNmZ8VvK3kEvzA5kYzHxtaIdd8HGGcoPoDf0vRMBj4bf+9F5VHJHcTaE7gkc73dSt+JiT6TwaXyrBWvhxNPMInX0+Fj17TcJjNjfafVtlSKVySOjge+EX8/hCc5fwd8vgtxfk+9x4V2VKZYRkPwcReOxXsSvEn03IhldhUtGvJqxMl9ikLHZarE3AO/WD+cUnIMOIkavUz7iHMyNR4X2s0yTas//V4A/QysHzxj+i4dXpgOwDin4hngtbqwjLoSq4txuja4J11KTHQjDtEKQYeP4KF7yYmuxCnFG4Z3eexGEqgrsTqNQ5cu4LsVpxqz0+VciXckma2AAz0OfivYyXhL4AO0GHztg4jVzTJFvBH4WBq1eidNzVh149ClgZS7GauLccq9GoZV4i6CjxX0H/IGCe1KrKkVp/Le2fjAlU+TNwBuV2KRn+S4tEZ52h6Uu5uxqBzr8aTWiXgLdzFvq9GihbpbcbpcpvIymg+/HWz3ymdyllFX4nS5TOX9ayH8NpxX8VuNFsEHDW85oHq34nQ71rT+0+8F0M/A+sFbBm+eBuMsjg/E1o1l1JVY3SxTxOv0ArcriYluxenmD91LTnQrjuEXzHt3Y713I1Y3yxTxunIB36043frpdD8bwHFG4LfBjBsosbpcpsF0qRWpW7HqxsHH3rmA0j3S+PPZf4L3Drid/HFUuhJrKsQZXZo2jMaF5YnANTXnraNYXY5zIaUnANC4XWJnvGEhaxyUbsWie4mJojyje4l9KnBOzXnrKFbEOb8SZwJ+e9+ncsrSzThToUwXVva38q2B36yxjDqOMxXKdH5l294av3XiDPwifuIHFafbsab1n34vgH4G1g+VR9lNK3EiVtcuTLoVq5tl6uK8DagLHf3oRz/6+aj/0KWBlLsZayrGqQ7KPAs+0G/OoKxdifUBxlkeT770OXD1VIjVcWIiozxZg3J3M1aTOIeU3qszMGBX4kzlMh1aeX8xfIyWusuorThTuUyHVd5fCr8e6LPO3K043Y71UfgZhEhJSul/KR6pMi3FiVjvtf7UBxurm2XqlhRHyoESR0REAH/6zh54D4BVzOwpM/tsvLc8fnH4vw841tSI8wkze9LMPh3v7QD8LGU+6rWLsaZmnC3jvY8BX0ylx89N7VhmtjI+Ls3/gPvN7GCAlNJ/4iPP448N/kab5SmW0crAiR3OWzuxynFWL5ZRSulNMzvQzJZoozydxJlaZVqtsow+ifcGqruMOokztcq0ahxLPhPvbYzfDpJTZ+5WnG7HmqYV90yJiIiIyABlZkNSSv+Nv4fij4rbAX+825CU0sIfdKwPIM4YvEv+Ql2Yt1qxPoA48+CD9C3QhXnLjhUJhXVSSiea2Zr4I4NHAyenlG41swnAyymlZzssz7CU0oJdmrfsWE3ibBdx5oo47W6TbcWZymUqb5PTdbCM2orzAZZpaJeWUXacbsea1imhICIiIvIhZGYzA7/BRyy/dyDEGmhxBmKZ+nveupnk6EZ5pnasgRZnIJZJ8/bBx5qWDOnvAoiIiIhIWz6FP0qxGxXbbsUaaHEGYpn6dd6KZEL8/R/gcjO7mbhQ+qDL8wHEGmhxBmKZNG8ffKxphnooiIiIiHwImdlgYMbUhbGGuhVroMUZiGUaoPO2HbB1SmmrgVCebsYaaHEGYpk0bx98rGmJEgoiIiIiIh9hulASkXYpoSAiIiIiIiIitemxkSIiIiIiIiJSmxIKIiIiIiIiIlKbEgoiIiIiIiIiUpsSCiIiIiIiIiJSmxIKIiIiIiIiIlLb/wOyB+8Un/zBkgAAAABJRU5ErkJggg==\n", - "text/plain": [ - "
" - ] - }, - "metadata": { - "needs_background": "light" - }, - "output_type": "display_data" - }, - { - "data": { - "image/png": "iVBORw0KGgoAAAANSUhEUgAABBQAAAHQCAYAAAARXjmZAAAAOXRFWHRTb2Z0d2FyZQBNYXRwbG90bGliIHZlcnNpb24zLjMuMywgaHR0cHM6Ly9tYXRwbG90bGliLm9yZy/Il7ecAAAACXBIWXMAAAsTAAALEwEAmpwYAACW3ElEQVR4nOzdd5gkRfnA8e97dxw5yknOSYIEiSoKCioICKIiqCQRFMSAWURBJImCqAgCgmIgqgQziAH9mUDBLIoREAFzTlC/P94at2/Y3enenbvdg+/nefbZ2Z6Z2uru6u6qt6uropSCJEmSJElSFzOmOgOSJEmSJGnBY0BBkiRJkiR1ZkBBkiRJkiR1ZkBBkiRJkiR1ZkBBkiRJkiR1ZkBBkiRJkiR1ZkBBkvSAExHfj4gdpzof81pEnBARv42I30xhHnaMiNun6v8/WEXE6hHx14iYOdV5kSQ9eBlQkCQtUCLiFxGxc9+ygyLiy72/Sykbl1K+MCCdNSOiRMSseZTVeSoiVgdeDmxUSllxqvMzHdT9+bfa0L4jIk6f1w3uiPhCRPwzIv4SEX+OiG9GxGsiYuF5+X9LKb8qpSxRSrm3kY/nzcv/KUlSPwMKkiTNA/MhULE68LtSyt1dv7igBlFa2qyUsgSwE/As4ND58D+PLKUsCaxEBnn2BT4ZETEf/rckSVPGgIIk6QGn2YshIraJiBvr3eO7IuL0+rHr6+8/1jvaj4yIGRFxTET8MiLujoj3R8TSjXQPqO/9LiJe3/d/jouID0fEByPiz8BB9X9/NSL+GBF3RsSZETG7kV6JiCMi4if1DvebImKdiPhKze9lzc83vrczcC2wcs37++ryp9THPf5Y71hv2LdNXh0R3wH+NlpQISI2johrI+L3dVsdXZcvHBFnRMSv688ZY92Br+u0buPv90XECfX1jhFxe0S8qm7fOyNir4h4ckT8uP7foxvfPa5ug/fX7fP9iNhq3J1flVJ+BHwJ2KSmdWhE3Fr/x9URsXJfnl8cET+rj5C8JSI615FKKX+rPWOeAjwS2K2mP6P2WvhpLTuXRcRy9b1eT5kDI+JX9f+/rpG3Uctv43uzIuJE4DHAmbU8nBkR74qI05r5q+t9VNf1kiRpLAYUJEkPdG8H3l5KWQpYB7isLn9s/b1M7Tr+VeCg+vM4YG1gCeBMgIjYCDgLeDZ5J3ppYJW+/7Un8GFgGeBDwL3AUcDyZANzJ+CIvu88CdgS2A54FXAu8BxgNbIxvF//CpVSPgvsCvy65v2giFgfuBh4KTAH+CTwsb6AxH5kI3eZUsp/m2lGxJLAZ4FPAysD6wLX1bdfV/O3ObAZsA1wTH++WloRWITcdm8AzqvruyXZKH59RKzV+PxTgEvIbXo1dX8MUvfXY4CbIuLxwMnAPuS++2VNs+mpwFbAI8j9+Nzuq5ZKKb8Cbqz/H+BFwF7ADuS2/QPwrr6vbQ9sQJaRNzSCQWOV3+b/ex0ZPDmylocjgQuB/XqBkYhYHtgZuGii6yVJUj8DCpKkBdGV9S78HyPij2RDfyz/AdaNiOVLKX8tpXxtnM8+Gzi9lPKzUspfgdcC+9a7+U8HPlZK+XIp5d9kY7j0ff+rpZQrSyn3lVL+UUr5Zinla6WU/5ZSfgGcQzYqm04tpfy5lPJ94HvANfX//wn4FLBFqy0CzwQ+UUq5tpTyH+CtwKLAoxqfeUcp5bZSyj9G+f7uwG9KKaeVUv5ZSvlLKeXrje1yfCnl7lLKPcAbgf1b5qvff4ATax4vIYMtb6//7/vAD8igRc+XSymfrGMFfKDvvdF8KyL+AHwMeA/w3pr/C0op3yql/Ivcr4+MiDUb33tzKeX3NRhwBqMEcjr6NbBcff0C4HWllNvr/z8OeHpfL5E31jLzbeDbjKxnl/L7P6WUbwB/IgMUkI9hfKGUctek1kqSpAYDCpKkBdFepZRlej/c/65/0yHA+sCPIuKGiNh9nM+uTN697vklMAtYob53W++NUsrfgd/1ff+25h8RsX5EfDwiflMfgziJbEA3NRt4/xjl7yXGye+YeS+l3Ffz0+xFcVv/lxpWA37aJu36euUxPjvI73oDCZLrB+Ovc3MGi78Di4z2uEbDI0opy5ZS1imlHFO3Q/+2+Su578baNpNZv55VgN/X12sAVzQCYD8ke6+s0Ph8/3r2tkGX8tvvQrL3B/X3BzqtgSRJAxhQkCQ9oJVSflJK2Q94KPBm4MMRsTj3710AeVd5jcbfqwP/JRu8dwKr9t6IiEWBh/T/u76/zwZ+BKxXu6wfDcyrgfrmyntEBBkkuGOc/DXdRj7mMTBtcrv8eozP/h1YrPH3dJiBon/bLE7uu+a2Wa3xerz1GygiViMf4fhSXXQbsGszCFZKWaSUcsfYqaRxyu/9PjrKsg8Ce0bEZsCGwJUTWB1JksZkQEGS9IAWEc+JiDn1TvUf6+L7gHvq72Yj+mLgqIhYKyKWIHsUXFrHG/gwsEdEPKqOS3Acg4MDSwJ/Bv4aEQ8DDh/Sao3mMmC3iNgpIhYiZxv4F/CVlt//OLBSRLy0DsK4ZERsW9+7GDgmIubUZ/HfQDZWR3Mz8KyImBkRu3D/RzymwsXAwRGxeR1M8iTg6/UxlJ5XRsSyNRjwEuDSrv8kIhaLiB2Aq4BvkONYALwbODEi1qifmxMRe7ZMc6zy2+8u+gJCpZTbgRvIngkfGeNRF0mSJsyAgiTpgW4X4PsR8VdygLt967PqfwdOBP6vdkXfDriAbHxdD/wc+Cc5oB71+f4Xkc/93wn8FbibbLSP5RXk1IV/IQcf7NxIbauUcgvZrf2dwG+BPYA96ngPbb7/F+AJ9Xu/AX5CDk4JcAI5yOB3gO8C36rLRvOSmsYfybELruy8MkNWB7F8PfARct+tQ44p0HQV8E0yIPIJ4HyAiHhMLTvjOTMi/kI26s+o/2eXGgSALHdXA9fUz30N2Ha0hEYxavkd5XNvJ8dl+ENEvKOx/ELg4fi4gyRpHohSxuv9KEmSRlN7MPyRfJzh51OcHU1CRBRyP9461XkZtoh4LNmbZI1ipU+SNGT2UJAkqaWI2KN2a1+cnEXhu8AvpjZX0ujqoy8vAd5jMEGSNC8YUJAkqb09ycH6fg2sR3Y/t6GmaSciNiR70KxEPoYhSdLQ+ciDJEmSJEnqzB4KkiRJkiSpMwMKkiRJkiSps1lTnQGA5Zdfvqy55ppTnQ1JkiRJktTnm9/85m9LKXP6l0+LgMKaa67JjTfeONXZkCRJkiRJfSLil6Mt95EHSZIkSZLUmQEFSZIkSZLUmQEFSZIkSZLU2cCAQkSsFhGfj4gfRMT3I+IldflyEXFtRPyk/l62Lo+IeEdE3BoR34mIR8zrlZAkSZIkSfNXmx4K/wVeXkrZCNgOeGFEbAS8BriulLIecF39G2BXYL36cxhw9tBz
LUmSJEmSptTAgEIp5c5Syrfq678APwRWAfYELqwfuxDYq77eE3h/SV8DlomIlYadcUmSJEmSNHU6jaEQEWsCWwBfB1YopdxZ3/oNsEJ9vQpwW+Nrt9dlkiRJkiTpAaJ1QCEilgA+Ary0lPLn5nullAKULv84Ig6LiBsj4sZ77rmny1clSZIkSdIUaxVQiIiFyGDCh0opH62L7+o9ylB/312X3wGs1vj6qnXZXEop55ZStiqlbDVnzpyJ5l+SJEmSJE2BNrM8BHA+8MNSyumNt64GDqyvDwSuaiw/oM72sB3wp8ajEZIkSZIk6QFgVovPPBrYH/huRNxclx0NnAJcFhGHAL8E9qnvfRJ4MnAr8Hfg4GFmWJIkSZIkTb2BAYVSypeBGOPtnUb5fAFeOMl8SZIkSZKkaazTLA+SJEmSJEnQ7pEHjWHN13xiwt/9xSm7DTEnkiRJkiTNX/ZQkCRJkiRJnRlQkCRJkiRJnRlQkCRJkiRJnRlQkCRJkiRJnRlQkCRJkiRJnRlQkCRJkiRJnRlQkCRJkiRJnRlQkCRJkiRJnRlQkCRJkiRJnRlQkCRJkiRJnRlQkCRJkiRJnRlQkCRJkiRJnRlQkCRJkiRJnRlQkCRJkiRJnRlQkCRJkiRJnRlQkCRJkiRJnRlQkCRJkiRJnRlQkCRJkiRJnRlQkCRJkiRJnRlQkCRJkiRJnRlQkCRJkiRJnRlQkCRJkiRJnRlQkCRJkiRJnRlQkCRJkiRJnRlQkCRJkiRJnRlQkCRJkiRJnRlQkCRJkiRJnRlQkCRJkiRJnRlQkCRJkiRJnQ0MKETEBRFxd0R8r7Hs0oi4uf78IiJursvXjIh/NN579zzMuyRJkiRJmiKzWnzmfcCZwPt7C0opz+y9jojTgD81Pv/TUsrmQ8qfJEmSJEmahgYGFEop10fEmqO9FxEB7AM8fsj5kiRJkiRJ09hkx1B4DHBXKeUnjWVrRcRNEfHFiHjMJNOXJEmSJEnTUJtHHsazH3Bx4+87gdVLKb+LiC2BKyNi41LKn/u/GBGHAYcBrL766pPMhiRJkiRJmp8m3EMhImYBewOX9paVUv5VSvldff1N4KfA+qN9v5Rybillq1LKVnPmzJloNiRJkiRJ0hSYzCMPOwM/KqXc3lsQEXMiYmZ9vTawHvCzyWVRkiRJkiRNN22mjbwY+CqwQUTcHhGH1Lf2Ze7HHQAeC3ynTiP5YeAFpZTfDzG/kiRJkiRpGmgzy8N+Yyw/aJRlHwE+MvlsSZIkSZKk6WyyszxIkiRJkqQHIQMKkiRJkiSpMwMKkiRJkiSpMwMKkiRJkiSpMwMKkiRJkiSpMwMKkiRJkiSpMwMKkiRJkiSpMwMKkiRJkiSpMwMKkiRJkiSpMwMKkiRJkiSpMwMKkiRJkiSpMwMKkiRJkiSpMwMKkiRJkiSpMwMKkiRJkiSpMwMKkiRJkiSpMwMKkiRJkiSpMwMKkiRJkiSpMwMKkiRJkiSpMwMKkiRJkiSpMwMKkiRJkiSpMwMKkiRJkiSpMwMKkiRJkiSpMwMKkiRJkiSpMwMKkiRJkiSpMwMKkiRJkiSpMwMKkiRJkiSpMwMKkiRJkiSpMwMKkiRJkiSpMwMKkiRJkiSpMwMKkiRJkiSpMwMKkiRJkiSps4EBhYi4ICLujojvNZYdFxF3RMTN9efJjfdeGxG3RsQtEfGkeZVxSZIkSZI0ddr0UHgfsMsoy99WStm8/nwSICI2AvYFNq7fOSsiZg4rs5IkSZIkaXoYGFAopVwP/L5lensCl5RS/lVK+TlwK7DNJPInSZIkSZKmocmMoXBkRHynPhKxbF22CnBb4zO312X3ExGHRcSNEXHjPffcM4lsSJIkSZKk+W2iAYWzgXWAzYE7gdO6JlBKObeUslUpZas5c+ZMMBuSJEmSJGkqTCigUEq5q5RybynlPuA8Rh5ruANYrfHRVesySZIkSZL0ADKhgEJErNT486lAbwaIq4F9I2LhiFgLWA/4xuSyKEmSJEmSpptZgz4QERcDOwLLR8TtwLHAjhGxOVCAXwDPByilfD8iLgN+APwXeGEp5d55knNJkiRJkjRlBgYUSin7jbL4/HE+fyJw4mQyJUmSJEmSprfJzPIgSZIkSZIepAwoSJIkSZKkzgwoSJIkSZKkzgwoSJIkSZKkzgwoSJIkSZKkzgwoSJIkSZKkzgwoSJIkSZKkzgwoSJIkSZKkzgwoSJIkSZKkzgwoSJIkSZKkzgwoSJIkSZKkzgwoSJIkSZKkzgwoSJIkSZKkzgwoSJIkSZKkzgwoSJIkSZKkzgwoSJIkSZKkzgwoSJIkSZKkzgwoSJIkSZKkzgwoSJIkSZKkzgwoSJIkSZKkzgwoSJIkSZKkzgwoSJIkSZKkzgwoSJIkSZKkzgwoSJIkSZKkzgwoSJIkSZKkzgwoSJIkSZKkzgwoSJIkSZKkzgwoSJIkSZKkzgwoSJIkSZKkzgwoSJIkSZKkzgYGFCLigoi4OyK+11j2loj4UUR8JyKuiIhl6vI1I+IfEXFz/Xn3PMy7JEmSJEmaIm16KLwP2KVv2bXAJqWUTYEfA69tvPfTUsrm9ecFw8mmJEmSJEmaTgYGFEop1wO/71t2TSnlv/XPrwGrzoO8SZIkSZKkaWoYYyg8F/hU4++1IuKmiPhiRDxmCOlLkiRJkqRpZtZkvhwRrwP+C3yoLroTWL2U8ruI2BK4MiI2LqX8eZTvHgYcBrD66qtPJhuSJEmSJGk+m3APhYg4CNgdeHYppQCUUv5VSvldff1N4KfA+qN9v5Rybillq1LKVnPmzJloNiRJkiRJ0hSYUEAhInYBXgU8pZTy98byORExs75eG1gP+NkwMipJkiRJkqaPgY88RMTFwI7A8hFxO3AsOavDwsC1EQHwtTqjw2OB4yPiP8B9wAtKKb8fNWFJkiRJkrTAGhhQKKXsN8ri88f47EeAj0w2U5IkSZIkaXobxiwPkiRJkiTpQcaAgiRJkiRJ6syAgiRJkiRJ6syAgiRJkiRJ6syAgiRJkiRJ6syAgiRJkiRJ6syAgiRJkiRJ6syAgiRJkiRJ6syAgiRJkiRJ6syAgiRJkiRJ6syAgiRJkiRJ6syAgiRJkiRJ6syAgiRJkiRJ6syAgiRJkiRJ6syAgiRJkiRJ6syAgiRJkiRJ6syAgiRJkiRJ6syAgiRJkiRJ6syAgiRJkiRJ6syAgiRJkiRJ6syAgiRJkiRJ6syAgiRJkiRJ6syAgiRJkiRJ6syAgiRJkiRJ6syAgiRJkiRJ6syAgiRJkiRJ6syAgiRJkiRJ6syAgiRJkiRJ6syAgiRJkiRJ6syAgiRJkiRJ6syAgiRJkiRJ6qxVQCEiLoiIuyPie41ly0XEtRHxk/p72bo8IuIdEXFrRHwnIh4xrzIvSZIkSZKmRtseCu8Ddulb9hrgulLKesB19W+AXYH16s9hwNmTz6YkSZIkSZpOWgUUSinXA7/vW7wncGF9fSGwV2P5+0v6GrBMRKw0hLxKkiRJkqRpYjJjKKxQSrmzvv4NsEJ9vQpwW+Nzt9dlkiRJkiTpAWIogzKWUgp
QunwnIg6LiBsj4sZ77rlnGNmQJEmSJEnzyWQCCnf1HmWov++uy+8AVmt8btW6bC6llHNLKVuVUraaM2fOJLIhSZIkSZLmt8kEFK4GDqyvDwSuaiw/oM72sB3wp8ajEZIkSZIk6QFgVpsPRcTFwI7A8hFxO3AscApwWUQcAvwS2Kd+/JPAk4Fbgb8DBw85z5IkSZIkaYq1CiiUUvYb462dRvlsAV44mUxJkiRJkqTpbSiDMkqSJEmSpAcXAwqSJEmSJKkzAwqSJEmSJKkzAwqSJEmSJKkzAwqSJEmSJKkzAwqSJEmSJKkzAwqSJEmSJKkzAwqSJEmSJKkzAwqSJEmSJKkzAwqSJEmSJKkzAwqSJEmSJKkzAwqSJEmSJKkzAwqSJEmSJKkzAwqSJEmSJKkzAwqSJEmSJKkzAwqSJEmSJKkzAwqSJEmSJKkzAwqSJEmSJKkzAwqSJEmSJKkzAwqSJEmSJKkzAwqSJEmSJKkzAwqSJEmSJKkzAwqSJEmSJKkzAwqSJEmSJKkzAwqSJEmSJKkzAwqSJEmSJKkzAwqSJEmSJKkzAwqSJEmSJKkzAwqSJEmSJKkzAwqSJEmSJKmzWRP9YkRsAFzaWLQ28AZgGeBQ4J66/OhSyicn+n8kSZIkSdL0M+GAQinlFmBzgIiYCdwBXAEcDLytlPLWYWRQkiRJkiRNP8N65GEn4KellF8OKT1JkiRJkjSNDSugsC9wcePvIyPiOxFxQUQsO6T/IUmSJEmSpolJBxQiYjbwFODyuuhsYB3ycYg7gdPG+N5hEXFjRNx4zz33jPYRSZIkSZI0TQ2jh8KuwLdKKXcBlFLuKqXcW0q5DzgP2Ga0L5VSzi2lbFVK2WrOnDlDyIYkSZIkSZpfhhFQ2I/G4w4RsVLjvacC3xvC/5AkSZIkSdPIhGd5AIiIxYEnAM9vLD41IjYHCvCLvvckSZIkSdIDwKQCCqWUvwEP6Vu2/6RyJEmSJEmSpr1hzfIgSZIkSZIeRAwoSJIkSZKkzgwoSJIkSZKkzgwoSJIkSZKkzgwoSJIkSZKkzgwoSJIkSZKkzgwoSJIkSZKkzgwoSJIkSZKkzgwoSJIkSZKkzgwoSJIkSZKkzgwoSJIkSZKkzgwoSJIkSZKkzgwoSJIkSZKkzgwoSJIkSZKkzgwoSJIkSZKkzgwoSJIkSZKkzgwoSJIkSZKkzgwoSJIkSZKkzgwoSJIkSZKkzgwoSJIkSZKkzgwoSJIkSZKkzgwoSJIkSZKkzgwoSJIkSZKkzgwoSJIkSZKkzgwoSJIkSZKkzgwoSJIkSZKkzgwoSJIkSZKkzgwoSJIkSZKkzgwoSJIkSZKkzgwoSJIkSZKkzgwoSJIkSZKkzmZNNoGI+AXwF+Be4L+llK0iYjngUmBN4BfAPqWUP0z2f0mSJEmSpOlhWD0UHldK2byUslX9+zXAdaWU9YDr6t+SJEmSJOkBYl498rAncGF9fSGw1zz6P5IkSZIkaQoMI6BQgGsi4psRcVhdtkIp5c76+jfACkP4P5IkSZIkaZqY9BgKwPallDsi4qHAtRHxo+abpZQSEaX/SzX4cBjA6quvPoRsSJIkSZKk+WXSPRRKKXfU33cDVwDbAHdFxEoA9ffdo3zv3FLKVqWUrebMmTPZbEiSJEmSpPloUgGFiFg8IpbsvQaeCHwPuBo4sH7sQOCqyfwfSZIkSZI0vUz2kYcVgCsiopfWRaWUT0fEDcBlEXEI8Etgn0n+H0mSJEmSNI1MKqBQSvkZsNkoy38H7DSZtCVJkiRJ0vQ1r6aNlCRJkiRJD2AGFCRJkiRJUmcGFCRJkiRJUmcGFCRJkiRJUmcGFCRJkiRJUmcGFCRJkiRJUmcGFCRJkiRJUmcGFCRJkiRJUmcGFCRJkiRJUmcGFCRJkiRJUmcGFCRJkiRJUmcGFCRJkiRJUmcGFCRJkiRJUmcGFCRJkiRJUmcGFCRJkiRJUmcGFCRJkiRJUmcGFCRJkiRJUmcGFCRJkiRJUmcGFCRJkiRJUmcGFCRJkiRJUmcGFCRJkiRJUmcGFCRJkiRJUmcGFCRJkiRJUmcGFCRJkiRJUmcGFCRJkiRJUmcGFCRJkiRJUmcGFCRJkiRJUmcGFCRJkiRJUmcGFCRJkiRJUmcGFCRJkiRJUmcGFCRJkiRJUmcTDihExGoR8fmI+EFEfD8iXlKXHxcRd0TEzfXnycPLriRJkiRJmg5mTeK7/wVeXkr5VkQsCXwzIq6t772tlPLWyWdPkiRJkiRNRxMOKJRS7gTurK//EhE/BFYZVsYkSZIkSdL0NZQxFCJiTWAL4Ot10ZER8Z2IuCAilh3jO4dFxI0RceM999wzjGxIkiRJkqT5ZNIBhYhYAvgI8NJSyp+Bs4F1gM3JHgynjfa9Usq5pZStSilbzZkzZ7LZkCRJkiRJ89GkAgoRsRAZTPhQKeWjAKWUu0op95ZS7gPOA7aZfDYlSZIkSdJ0MplZHgI4H/hhKeX0xvKVGh97KvC9iWdPkiRJkiRNR5OZ5eHRwP7AdyPi5rrsaGC/iNgcKMAvgOdP4n9IkiRJkqRpaDKzPHwZiFHe+uTEsyNJkiRJkhYEQ5nlQZIkSZIkPbgYUJAkSZIkSZ0ZUJAkSZIkSZ0ZUJAkSZIkSZ0ZUJAkSZIkSZ0ZUJAkSZIkSZ0ZUJAkSZIkSZ0ZUJAkSZIkSZ0ZUJAkSZIkSZ3NmuoMCDhu6Ul890/Dy4ckSZIkSS3ZQ0GSJEmSJHVmQEGSJEmSJHVmQEGSJEmSJHVmQEGSJEmSJHVmQEGSJEmSJHVmQEGSJEmSJHVmQEGSJEmSJHVmQEGSJEmSJHVmQEGSJEmSJHVmQEGSJEmSJHVmQEGSJEmSJHVmQEGSJEmSJHVmQEGSJEmSJHVmQEGSJEmSJHVmQEGSJEmSJHVmQEGSJEmSJHU2a6ozoOnphw/bcMLf3fBHPxxiTiRJkiRJ05E9FCRJkiRJUmcGFCRJkiRJUmcGFCRJkiRJUmfzLKAQEbtExC0RcWtEvGZe/R9JkiRJkjT/zZNBGSNiJvAu4AnA7cANEXF1KeUH8+L/acTDL3z4hL733QO/O+ScSMM10bINlu8HghU/f/OEv/ubx20+tHxIkiRZLxkxr3oobAPcWkr5WSnl38AlwJ7z6H9JkiRJkqT5bF4FFFYBbmv8fXtdJkmSJEmSHgDmySMPbUTEYcBh9c+/RsQtU5WXeWR54LdjvRlvHlJab4yhpBMHDSedTGyIaS246QwzremWzjDTmi/pDLV8L7jpDDOt6ZbOuGl12vsL2Lot4OkMM63pls4w05pu6QwzremWzjDTmm7pDDOt6ZbOMNOabukMM63pls4w05pu6YybVsd6yXSyxmgL51VA4Q5gtcbfq9Zl/1NKORc4dx79/ykXETeWUraaTmlNt3SmY55ctwUzT67bgpkn123BzJPrtmDmyXVbMPPkui2YeXLdFsw8Tcd1WxDMq0cebgDWi4i1ImI2sC9w9Tz6X5IkSZIkaT6bJz0USin/jYgjgc8AM4ELSinfnxf/S5IkSZIkzX/zbAyFUsongU/Oq/QXAMN8nG
[... base64-encoded PNG output from deleted notebook `display_data` cells (Matplotlib 3.3.3 plots) omitted ...]
gS2btn//r3C8nj5A9rmg+ry7vsb5eTwetdyQuWl/e9vwYdxpLoq9vHkz0Qf9NXt7sek75O7QlH9mj9MXmhPlRPM/JGS2/wwVXI9sm3GP+YWWueGBus+yTyxk/z7vmD637y3CHzdAt5bDqGDACv3SUvLftJr7y3rPVyqKBBXfcKxto3p5AB+3FlNERak51LlqX7o5zXNLbtibWuX8n4AW677G/9+8g55I22I6nH7SHqZP8MbW/pe3+5LvsuE88Y96O6z/R613TZb9vOAQN7q02Q1rmMPwe8lrwZ0nkA2sX9Z8YzMNt/yIjbp4A/0ojq1QPruMH6OqT1afIOxUpkD4ELyMb6ceQJepgphTYmo6dPAk5oLF9A3jXsMl3OHoyNnj6HjOZ9gyFnGajrf55s2H8IuLKx/Gn1oDjMQf4M8g7K6tQL/br8P+kYhKjf24G1vCcazPLN9e9BkerVeweJ+vrN5En5ZPLC+kvUu58D0tmUsRkqencnTqE2PGpd6DRvbCPN11LH1iAbDNfX7b268b+6Doi1HnliX1gPym8lL0B6swR0CWi9Abi+8fqsmua3GfJuA3nn/NON1xeRDZvz6DsJTSGdL5MnpGFGGn9No6z3r2W997D7SV1/HxrBMfLOxfk1T2+ke8Bub9pnZulNMdVlOqjtqNNK1ddr1e39FB3Hj2jUoaPJwM6zyEb5u8nAzJya3049aur+0huwcm5d/1PAyxpl2GWAprYZTNYnG0ZDzWDCPWdUuZlsuG9B94HHRjbLC5PPOnMuHZ77rWm8t/H6ATUfx5INvKXo1vBcirE7jCdxz1l+3lXr+KDH3oJs3J1cy3pb8k7qI8jj+ifJc+mk57iaTpDn1zdQnzkmz1P7kIHl1wzarl5a9ffbyIuF1RvvbUgeV46c5n5yYC2/feh2h3hnGmMYkYGrrwI71de70nFk75rWBxrldkutQ9uQF11v6JLOJPvJsDMP9Qaea9tHBj7m0pfek8mL/ofV3+uTF/nHkhcQXdpLzwDO7CuzTzNc76de3X7fgLrddbDmFcggz2aNZe8jL7LPo9vYVL08nUIGn3pjQZxR0/kCE/S2nCS9x9R9rDdVam/f2YG8SO7a1X0Tct/dBHg9+Vjim6mzq5AXyV22b1vGBg7u9ca9kDqgbt3GgeMb1XTeW/9eutbpp9a6fjkT9LRsK++WfaR5Lul0LUA+6nAdY+OwXUFe9O9U95mBj8w00tprgn2kN5bX5+h+fnsCjdlguOcMbV3OJVG/q2O557lkARlg+/yg/YTB54DPMPw54CjueQ7YiDz2Luxa3ovzz4xnYHH5qQe9nRqvdwW+3XHd3uiiT62V7hTyzkmvEs8jn4saOLheraC71h3oGPIC5hryJLZCPQB9tUM6m5Jd2TdjfHfEDcgTzsvodjdnr3rwWpG8g/cUxg+mdDAd5teun305Y91azyUvgE9iLFJ4GPUiYEA6r64HuhXJO1W/p96Vq+93GsyS7J77LHJAnuZznuvW7/AualBjQDobk92jN6M+ClKX70lGUZ9OxznfG+vOIaft6z1a8jbGRuffk+z18ZAh03waYyfT95Cjjr+NDE51mZHh+WT33ffU+nxSY9/5IUNMWUqemC8kgyNv7NWpWj++zgTPu02SzgUTpLNFTadLT4O5dV9rjnPyHDLK3GvEDDNg4GbkRfBhtZw+0di2b9D3jOEk6WxOnvjaZmbpOvDcmmQX+1cyvsfKTuRdv66DRi5F46Kg1vuXkyfto8npOXfumNbytTyaXZOfQO7Xy9Z87dDx+x/ZDCZkwPhT3HNGlU/ScfC5mv+HkcehnzCNWV7qOgeTFwtTnXVmbqOMeneEtyGDKk8YIh+9htWWZJB3yrP81M/uUcvio8Alfe9dRPdBtXYgA6qnkBdln6nLH17zOUzg6AF1neeQQcl5dfnaZCCqyxR8c/r2s/795H+67Cf1f+5S95VmV/mT69/fHWLfXYuxHljb0RijgbyhcDodpwVkBDMP1XQ2GdU+Utc7nDyffZLx0x1eSrfHQjZm7LG5pet+c0Rdv/PjqnX9vcnA+n9Np243tuurZID2vdSgNNktu3PAhzx/f4ocU+YMapuEDLq8g24XfKvW388kAz8fJo8lm5EXe58YIj8rMP54tmXdRz5Y68Wf6XbTZiUymLEsY8eo/chAxFJkm+UxHdKZw1hvkE0YPw3f2vX7HOa4vVLLPjLMuWR+Xf/NZAChOVDzo2v5dwrU1HXeTF6j9O8jl3TZR+pn12VsFq7e8XFKM7SRwZBTmP65ZAcywHYy0z8HbEz2qJ7yOWBx/5nxDCyOP4zN4bt7x883n81an+xSelDfZ65hwIUeeXL/GnlX8Zx6wNiAPBmfTV5kX8aAuww1nSvJOy7fY2wgpN7O3nsmfNAIrCuRAZBryWfNehexnyfvfr2M7Fa0RYcyWqN+9oGNZeswNobAfPLO7KTBlZrOT8hnjz5JHux3rHm8jGxgDxzMspbRtfUAcSF5p2Q5xo+g/pcO6axdD3Cfrt/P2eSz5OuQz7PdAPyK4brw9k5+zdGdd2f8CM+n0v0CtDeY4mY1r6vX+vFG8uJ2vy75qX+vRt49+y2Nkx95QdtpruW+feWEui0LG8vfTn1GsWM6G5ENjlMZ/yhO53QYazD2GuhBBqjOYrhGXq+s9ycbUqc380A20A7rkE4vH1OemaXve3t03TcOqPWgdwF5KvW55EHbRd4JuEewsdb17zHkbCEt/+eMum2d7jgyghlMyIuEjRtlsibZK+c6hp9RZdXG3yuRF2Vr9uVp4Cwv5MVdbyaddclj7dcYftaZA6lzmrfU0T/T/fn/Y6mDizE2PeVhDD/LzwKyIT2n/j2XvODbgzwmHAN8vGOe7l/r5oPJAO3jGXuu9Z0M0bW/US57kgGsU8m7VnuRx7xOMyvRN6r/VPcT8ri/VN+yjchef0fTuOs3qH5PsI3NNsuRXdKqZdxMq7effG+Y/YS8UO0N0Lli3Ueag+R22kfqZ19HDQ6TF7DvIs9tx5PHunfR4cYGeRH7QCYej2Jh3fc6BeoZG+9nxVo3j5pK3a5p9NoCTyXPAy+iXmzX+t1psLdGes+o++zevTKv6ZwyYL2Hke2t91ODFXX73ky2VU8nb0h1mvmgrx71j5WzNfnIyuld6mTL8pXI3g8nDdq2Rp184CR5XAh8qGN9bAYcpjM7V69OziHPc0/jnjOGdT0GLFfTWZs8jl9Ntr067yM1naXIdu1E+0nnGdr61pvyuaSutzrZXtuCDGjtzBTOATSuh8ibaecyxXPA4v4z4xlY3H5qBVyPxh3sDuu8j4ySrtFY1rwzdxy1W+6AdE4HDq9/70XeVe6NEL05GUToMp/xh6lzA5PR5dNq/g5jLIgwcH7l+vsQ8uLlJWRE8WXkxfHb68/AwQcbeeofhPJzNe1HkBfGh3RI5yOMdf8/lfEXZbuRjYcJTyZ96fwn8Mb696vqtp1FRj73IQdD6TKa/8mMnUg3JwMqH6ZewNb8DvVcKvmIxEo0GvyMb+itTQaihpo2ra771JrHC+vrZRgQsSYbs
PMZPz3SWo2/N6j56VI3VySDNo+tr/euZXQZefHwKjLANGn0mzwh94JOc+p2fYSMnB81RDobMH5E/+bF9tJk0O6SQWVUP78JOe7HXMYa6OuSd4n2ZazHxqBpnLYguyE+hLyLsnvdR3qN7Y3pNjPL/cjGRy/YsBN5HHg32bA+nAzWTRrVJ6P2F5DHkSvoC8bU+nHboLJu1N2H9pcnY43ts8g5lwdt20hmMCH32fPJffYj5EXfUmSD+Lwh962HkgNivZwJ7t7W77PLLC9bk70J+stog7q/HE+3WWceyYBpw8gLmi5dyzcng4UfZfxdwkeR563e87yDZvnZqm7bOWQAeulGOpeRFyeXdvj+ezPzHEd2u92v7/t/INmIHXhnj75ZdRr7zqE1/dPrfjNpALF+vx8izyELGWu89ur2cl32E8ZfpB3e996y5PPFNw0q6/r5Tev+9JLGsuYxbgvymDTpuYQ8/nydviAEdZyEIfaRSetk3faB+0j97MNreZ7G2F3BZcm208HkhdERDBhzhbEZoz5KnS637/2VyLbOpN3LyeP/h8jgwEeBZ9blj6l1+uNd6najHD5OtlM+Rl87izzW/KJDXbpH3e57/0Ed07mA7O3yVPKYuz3ZG2ZdxqaH7tpjZaI62Rx8dBny8Y5B58lHkPv+RBexQfbsuG3QftKok/eoc7WOb0mHaWr76mOvh9fS5Hg053bdR/rq5MdoTOHYeH+zut8OOpf06uRRZO+X3rP7jybPUyd22Ucaebqwbt+VTPAIAN1maOu1AZbtW74tw51Lem2Aj9b1Nuh7f2O6nwOa5+7eOWl5sufL8XQ8BywpPzOegSX9p1b2W8iT/Pfou5tINibO7LAzrUOeVJoRxZMZe1Z/DfoGRWtJZ12yYdZ7ju2H5Ml9n7oDDDW+AXkhfinZsHs4+bztzXQcAK+m0TYI5fNqnt5dlw8aj2Bj8sKldzd2V/KA/vYht6l3h+vt9fUN1Ol3yMHxTqHb84xzajpvaSw7gTzBfoJ6Yuw/QHZI91N1/c+TJ7Q9Gu+tTF7gdJk27ym1XjUbiuvWejVwYL7GOh8mL+jOIU8y8xi7MJ5by6vTuARkg7g3hVRvYKzlyOcZ96tlt1OHdC4ie57s2Vi2BtmweS75zGSXdFpnQanvr0TH7nd1P3lp87uqv19InnhOZ0Dvjvr5z5EjFr9xgveWonZ57pDOx+p3/Umy0b91/b72JO9cLaRlqrAJyugQxmYvuZLG7CX1Mxt3LKPv13UPJANI0ff+VnR4TIURzWBCBg1eTTZmP0jOPvAixj9y1OkxFbLx8VPymHQdecxdinwEYy4ZjOwyy8v5jE1R+xDy4v5tDDE9ZV13opl5rqDx+A7dx9s4l7yQfhN5TmkOILs6eVzqMsvPFxgbx+J9ZHfQj9XvcR6NmRU6fP+9mXn2A/5Gdt3vPY++DN0H+ptsVp1er68uj/V9hezF9SzymPiixnu9QPzAPDH+Iu30WjZ79cqcbMwe0nHbdibH7Pll/ek9l/4I8vy1H5Mc/xrpnNfbHjKAtT15/m720Osy3sonGulsTR4HPsPYAJDbddlH6mebM0ZdS2O8H8bOT13O480Zoz5ABmrfQaONQ4fnv8nHCV5DHsNeR/Y0/Eyt02vRN7PCgLQuq2k8ljyOXFrT702Ztykd2mAT1e1GXZxDjjb/iAFp7Alc3Hj9S/KC7R21zg97h7m1Ttbfq9Ot50p/nXw02du3l84+wAFD1smtyEdCmnXycXQY74h7zmC2SeO9Xm+2rueSZp08jWwfvZ2xGy570qHH6QR18qb63T2of1/pkFaXdkCXAZabbYC1Gd8+XZM8D3R5JKzZBjiSRnuJsf1/4E3E+rnmuft6xmaZ2I6xc0Dnxx0W958Zz8CS/lMr7WH178fWg9k3gX3qshfR11V0krQ2oxHVJk/uH69/n0vf4w+TpNPrdr1i3860Uj0gdWpQNdY7kGy8zqOO5kte1A7zfOwGTDwI5cr1ALBhhzTmUiPjjJ38Nq1lvl9zeYe0NiEbG18EvtFYPoeMYnedleHh5EnmTPIC5uq6fEPyAj+65qmut3etP/PIuyfPJ0dj/gLZ+Lgf8OwO6TyPjKB+lGx0zh8mH4109iVPPuuTz5GdTePxDbKBvm3HtJ5R01qJbLD+Z61bb6XjM3+N7+hVZODg+kb+nk3HZ+Ia+WnOhPFM8mL6ZDrOntBYd6u+tBaSJ9pzh9nfyIuFL5F3TK8h7zrN7/tMl/man042NpevdelcctrTj/SnNyCdVer+tW3f8v3Ixu3AcQj6yujCmrdzyePBXox1yX8A3RrouzKCGUzIBuGXG69vquXzRrJx03m2mEYaTyEbG7uRF7K/b/6PDuvvQF5w7Fjr+QVkQ/RoMojYaRAz2mfmObDuN++k+7Fybxrj6pA9VT7E+MGjujwfvS55vJ1fX/+CvDt7aC2nl3bMz9o1nWaD82iyl8jZdJxWsq73aPL4sR8Z+O+fVWc9ut2N24Px55HH1nRXr69Xp9t+u2dfWf+SbOgfTR4Xhgoe1TSeTV607kMG/f8IfH6I9XerdWaD+vq8Ws7vq/Wz09g2ZEDvWMYu0q6qdfSQuu+9fIg87cuinTHqCPL4+6yO6WzYl879ybu5b6WOBTTEtq1BXpDfv7FsHtnb8+PARkPU7YvJtkBb3e4yKPJWjA2y/UzGxuxZgTzPHTqFOrlvLfup1snH1e9q/b46eRLZVuraJlmaPBb2phFt1smbqUHOjttzWX99pBGkGWLbBtXJpwxRJy9qqZMn0CHQ21h31VrGbbOYdWoHkMHCC8nHLs5lrA3Q61m9Ht16Uj2OxlSu3LMNsC5DtHPqOvuQN0h75+4/NL+H+9LPjGfgvvDD+MEHl64708VkVO3nQ6TT3016JfJO79GjqMAMMeBj33qbkBd532VsQLxOdxgnSGvKg1D2lxVjXUCfVQ8ak0bOW9JZo55s3kFGO3ekMWtExzS2JO9+vYqxbuF7Ad+ZQn72pj5Lx/jHE46jDorVMZ1jyaDD88kLyJPrQbHXha7rBci5jB9w7s00RhfuesKon/00dVAw8iT4fbJ3wAlkA3T+EGnNJU/uK5Ld435DBkoeNGR+Tmc0s6CsSzb2HkDeEfwceYFzJPA7OvbwIJ+l7d01XUBeoB3QeL/rxd5LGP8Yzz6NbXvFkHXyBWTDZaLZS940RDor07jIIHv3XEJegGxGBvIGPm/PiGYwIbs6blX/fgKNZ2HrftN5do/GeltT786Rd4tuJYOtp9NtZPeNyeP9W2p9Oqfx3gsYbqaQjWmfmadTfurnd2qU0xyycXcWGWQZdp70U8gus/9F4/hYy+1UOtzZr58/vaY15Zl56ufXpDGWEVOfVWdbai8Axu4ufo7ae4a8q9nlLuojmfwi7TX19TDB6N0YG1DvwWR36r+Qwe45g9Iiz2+fII/ZnwDOrstXrHW184w65Pn/veR59ujG8i3I40DXO7KjnDHqC2SAfVozRpFtvuPIHnR71NerkMejTuNlNdJaCJzRt2w+2UOz0/GbHI9osrr95UF1u27Hto3X
KzA+YHgoHZ79nyDdPVvq5Okd6+RmZBDl6Fonz+qrk6/qup+Qd89Pa6mTJ3epk2RPte0a9XGbqdbHmsZks5i9e8g6+Z6WOjnpGEkTpPUCGo8FNZZ3bgeQ1zWTtQGuplsbYEMyCL4UY48WNNsA5zP8LGaPZOJz94fpeK5cUn6WQotcKeVfjb//WUo5kzr4DLlTdU2n9KVzB/lc6eFkN6Upi4jlyQu0hcOuW0r5IXn35DdkF3DIHWpopZTLSymX1TwtQ160TyVPpZRyV/37LDKKucoU0vk9+fzXHHIbjyRPsMOkcW0p5eRSyomllKsjYiXyTtpbhs0P2dtghYg4tLd91ZHAGhGxzWQrR1qKPJl+qZTyUfIu9h1kZP2AiHgH2aVvUDpLkyfnWyIi6lufJU+KRMTLyG74k2qkdUop5dKImEc2+HcppXyMvMP7J/Iu+0ARsWzd51YnnwE+lbwj9m3g8oh4Zof8zCUvfm4ig0aHkBej/0k2kP9KNmg6KaXcRJbNs8lAxCmllN+WUt5KNko37ZCneeTzjF+LiKVLKbeRJ/jXR8QL6/8pk6XT8D3gpRHxiojYlqzTPyYfqdoxIlYflEBEPCAiHkte0C8AfhURL298ZA7ZK2qgiNiQLIM/1v2DUsoHyKDfr8jg1h9LKRd2SGc54JsRsVzj2HshsHZEPB24u5Ty9Q7prATcWY+Nl5Hd8HtWIhsPXbZto4jYJSI2LqVcDXwoIg4gjytHlFIeAPyklPKPDnnqzerwM/KRgLc2PnK/Lnmq+dmV7A30V/JYtHNE7BwRK5CBhPUG5aemtQH5GMBfImKFUspdpZTfkOe1VYETI2LVjnl6NBkw+AnZAL248ZHeAHn/7JjOh8iA9pfI49HR9SO/pnud3JzaQKzHJ0opR5EXEEEeM+eVUr42IJ3NyAbsN+qi3jZ8Edg4Ip5C9lq4ukM6ywJ/q8fbC8iANKWUvwK3k11qBx4HIuLhEbF7RGxWSrkIuKTm42jy+LYKcHP9PlvTqnlahgzq3EHWwaNqHu4k78ptPllemvkhy2YZMjD7uFrnIYPaD2i2pSZJa1PyfHFnRMyt2/Ad8ti7fkS8rPd9dsjTDuTF4Z/I8v5y4yNrkL3FupT1DmTQYG2yfj8VOK6U8mfypsaWg/JT03pSROxItvfujogfNI79fyHPV5O2AWo6e5JB9C/1ltW6/f/qyzvIbuWtdbsep08DTo2IUyJi61oP/1TfX4MM/L+v47btFRG71LycT9bJZ5DHp16dvKVDndyTvHHwfLJr+Qpk26ZZJx9WX0+WzpMiYtdSyjfIi8TzgCf01cmNBtXJiNgD+EMp5dv1f95VSrmKfAyrc32sae0VEY+q615PntcuaXxkjZqnQXVyr4h4JBl8XJ6sk89kfJ3cqmOeem2Iz5AX/r+KiJc2PtKpHVDbDpsDV/bak7UNsC95bXEp8OcObYCDyePQf5VS7m6cM74IrFPbAGVQG6CXp4jYrublO+S5+/nU4H09d/+sy7lyiTLTkYv76g+54097BE6ycXTMNNOYU9PpfGegJZ359XfnqekmSWvoQSjb0hnR97US+TjFhiNIaw6NkXU7rtMcHGhbskva9eTdvqXJi+QfMeDONeN7KSzTl+6DyOfd/8ng2SLGpdP33inkHdpvUwfbHCKtXi+R+X2fuYbBs470jzB+P/KO0Fuody/J7pkPGCI/U54FZYLvbWPyLsFPyWDBg8hgyA+ALbvmqb5u9mLajbz7/ORB9b0vP7vW7+g9zX2f7I653oB01iLHRfgaGWB5LHkH5mrGZi8ZuF19af03eSfgORPk9W8dymgt8jGir5F3CZ/TeG+Nur/8mm6zzlzeyM9z+95fjQyWTLqPNNL6Wi2TC8lG0PxaTj8YtH7Ltn0GeFJd3rubsgoZDJo0T4xoZp6+PF1GXlQ9u/m9MfaY1qCBDHvf/9drXepNCfsR8ph2WP096bGkL52Pk8fG5nSeq9bvf2DPs5rWFWSw79/fNeMfFfxLxzxdSfYuaKYzh+zN8B1yP5l0Vh3Gz4R0bW9fYGzQv/uTPf661slv1+/mejLY0Hv06Zoh6+SVZK+zb5HHyuWp3dvJHkQD89TIz+fIwPgC8lhyDXmxdkQtp82HyNN0Z4zqff/nkfvKg8iL0fPJC7UX0WHGqL50vkYeO9Zk7Lno+eT5oEtvk94sVteQgYMNyQvga2qdP5oMKE766GtN52byGHRyf11q1O3WbeOeg2MfRB5z30geR9Ygx115/6Dt6tu2q8gg1KZkkOx64JtD1Mnetl1DBmo2qMt7PSlX61gne/m5mpxVYI1aL6+q9X1hlzpZ0/lt/Z+nMLb/9wZu3qdLfezL07VMYxazvnTezdjjHM0y6lon1yAD2M0BcXep+biQbFt0mcWsN6vaRo1lcxg/iPydDG4DrEHeAOnNLBI1nXlkAOl7dJzFrLFtGzaWbUT2hL2ma51cEn96O7/uZRExhzyZ3TGCtJYqpdw9gnSiWCFmpYh4H3kifXPJXhBExCvJk8W1ZIPt56WUQzum86aSd6x7PTvuKqXcFRHHkifbfTukM6+mc2tdtnQp5Z8R8QLyjt+xpZTDO27bPLJ3QC9Pc0rtURERveftJu2dU9NZhryL20tnP+q0QqWUzw/KS19+jmiU9Uq9fTUijiOfkR/YW6iRpzc3yumpZLe+n5GN65+XUhZ2zFOzjJYmv7e7I+JI8g7hhzqm8+ZSyu/qsnmllP+pf59IXug9d0A6p5N3yd8eEU8iy3ibUsrttRfCP8geAj+bLJ0J0noi+Zzlk0opP6rvb05e7J0xzXQ+BtxZSnnpVNOpd0IOJxv+b5zCtp1E3tUL8qL/112O333p7Fnz9IRSyk/r+4eQz35O2huqL529ajqPLqXcVsv5DuB/evV+yG0bV971Mw/s5XGIPB1PPrJ0c72bujJ58fDNIdJ5Uk1nj1LKj+v7e5IBsVM7bNuHgV+UUo6OiDeRwZUgG9QXkUHJ7Uop755iOj8nL0BPIPfbZ0wjP5eSY2esUkp58RS2bSOy58lvgRtKKRc0jwkd03lzzdO/yHEXvkJepK/Z4bjdTOctNZ3fkXcZV6pp/aNZr4bYtl45/YycKei6iNim5B3fYbZtffJi+mdk4P6P5OMv509h2/5JltHFZK+1jUopR7enMtY2q/v5ivX/70H2gNiCvIj9OXnMbe25MkE6t5KPTX6T7PXwMjKItGMp5bjJ8lTT24a8+DyMPOZ/kOxJezAZLJlbBvcSmihPTyLL59XkIH+XD6qTE6Tzh7pt/03e8X4ZGfBZf7I62VLWe5GBtkfVfF0P/Kt3bBliu5plfVAp5S8R8YhSyneHLKM/kPvXhWTQvnee/UYp5YIh0rmNLOteGb2cDLg+ZFCdrOl9GPhRKeVdkb2PnknexLqObGdeBvy9ZK/krulsQvY03JKs1+eT+8xjSikfnGI6V5GBydeR31uXnrD9ae1L1u0bge+XUr7U5Ti5RCozFLHwxx9/uv1wz5k5+qfg2om8czHpADsTpPO6vvdXISPgkw4c1yGdrchIbZeBlQa
l9XCya/agWUf603l9473OAxlOkM6UZkFpSeuNfe9vSkbDJ30OvEMZ9QLAk/b06VCPNiDHX5h0AELaZ3h5S/37/nSY4WWStN7H2FSo6wGP71BGg9JZnRwNfdIBnzqks0ZNp8v0m21pvan+vX7dtkG9RAblaa0ueZrkextqZp6OedqwS1qTpNPL0/LTTKeXn7XJ54wHDkLG4JmHTmjud9NI56i6fNKRxjuk8866vMvMDm1pPa2mdY/pBqeYp3fV5YOOSW3pPKOm07k3ZYc8dXqefJJ0nk7exX7DNNN5CuNni+o8Dgjts1gNO0ZCfzpX1HR6Y24MM0ZG/+DYh5K9DzodSybJ07fIYNawvTPbtm3vrvtJSzrfrensOaL8DJz1YIjv/7EjytNTutZJ2mdDO6DW74EzOw1I57k0jrcjSOcddXmXc/egbTtp2O9uSfqZ8Qz4448/k/8w8cwc32qc5F9Fh0H/WtJpzvBxCN26cLWls3ddth/dpygclKcX0q3L3KA8vZoOAxB2yM8ws6C0fW+9rtivHdH39oYRpXMwHboD189ONsPLeXSc4aVjWi8aQTr/nsJwmul8gTrw4jTS6g1od+6Itq1zngakc+4Iv7fOaQ1I5/MjTOeFQ2zbZDMPfYkBjzt1SGc+2XNhgxHk5ysM8QjdgDxdSPepU0eSpwH5+SLDzTozkhmjJklnZfIu/yjK6Mt0nAGhL83+C/VXUZ+/n2Y6h9Z0hh0Ub5SDYzfz9Iuap3NHtG1TCWZMVNajys/nhk1nkjwNNYvZJHn6/DDfP+2zoa1S992ux8m2dFau6Ww4gnS+Qn0sYwRpXTiVfXdJ+ZnxDPjjjz+Df2ifmeOHwE9HkM6N5CAvo8jPL0a4bcPMOtKfzv5kd+AfjLCMOuenQ1qjyNMot22Y739kM7yMKq3Zls5szJPbNr0fpjjLz2xPZzbmyW1rXXckF+qjSqeu9xTygro3Y8iUxpaabds229KZxXka1WxosyqdUae1pPzMeAb88cefqf2QdzHuZMhuirM9ndmYJ7etUzonkFNe7jSCMhpJWrMtndmYJ7etc1rLk4/4THc/mVXpzMY8uW0D0xjVhfpI0qnrzq+/pzU49mzbttmWzmzNUyPNZcgxIKa7n8yqdEad1uL8MxdJi6snAReVxpRKS0g6szFPbttgp5GDmF02zXRGmdZsS2eUac22dEaZ1qxKpw5mvD7w4ensJ7MtndmYJ7dtsFLKZyPi4lLK/0VOOTlwmspFmU5N6y/195TTGGWeltR0ZmueIAdfJMfKef8095NZlc6o01rcOauCtJiKEc3MMdvSmY15cts6pzWSGV5GmdZsS2c25slt65zWSGYemm3pzMY8uW2SNPsYOJAkSZIkSa2WmukMSJIkSZKk2cvAgSRJkiRJamXgQJIkSZIktTJwIEmSJEmSWhk4kCRJkiRJrQwcSJIkSZKkVv8fRNtriohxlWgAAAAASUVORK5CYII=\n", - "text/plain": [ - "
" - ] - }, - "metadata": { - "needs_background": "light" - }, - "output_type": "display_data" - }, - { - "data": { - "image/png": "iVBORw0KGgoAAAANSUhEUgAABA4AAAHPCAYAAADTUpj2AAAAOXRFWHRTb2Z0d2FyZQBNYXRwbG90bGliIHZlcnNpb24zLjMuMywgaHR0cHM6Ly9tYXRwbG90bGliLm9yZy/Il7ecAAAACXBIWXMAAAsTAAALEwEAmpwYAAC5eUlEQVR4nOzdd7gkRfXG8e+BJeecc0YySBQliKJIUAFBBBQQlSBRRQREiSaCoigZCT8JKkGCZJGcc5AsOUoSRIHz++OtcYfrzp2eO33nzu6+n+eZZ3dS3Zru6urq09WnIzMxMzMzMzMzMxuTCUa6AmZmZmZmZmbWvxw4MDMzMzMzM7OWHDgwMzMzMzMzs5YcODAzMzMzMzOzlhw4MDMzMzMzM7OWHDgwMzMzMzMzs5YcODAzs7FeRNwbEWuMdD2GW0QcGBEvRcRzI1iHNSLiqZH6+1VExBYRcclI12MoIuKQiNh1iN+9KSI+VHOVzMzMHDgwM7P+FhGPR8THB7z25Yi4pvE8Mz+UmVe1KWfeiMiIGDVMVR1WETE3sAeweGbOOtL16QdlfS448PXMPC0zP9Huc/0mImYCtgJ+U57PFRE3RMQrEfGzAZ+9KCJWGFDET4Ef9qa2ZmY2PnHgwMzMrAY9CEjMDbycmS90+sWxNVjSD3q87L4MXJiZb5fn3wVOBuYDNmoECiLiC8BjmXnLgO+fB6wZEQ4smZlZrRw4MDOzsV7zrISIWDEibomI1yPi+Yg4rHzs6vLvqxHxZkSsEhETRMQ+EfFERLwQEb+NiGmayt2qvPdyROw74O/sHxFnR8SpEfE68OXyt6+PiFcj4tmIOCoiJm4qLyNih4h4KCLeiIgDImKBiLiu1PfM5s83fe/jwKXA7KXuJ5XXNyiXabwaEVdFxGIDlsl3IuIu4J9jOgCOiA9FxKXljPbzEbF3eX2SiDgiIp4pjyMiYpIWy/4DZ/Mj4qSIOLD8f42IeCoivl2W77MRsVFEfDoi/lb+7t5N392/LIPfluVz7xjOqrfVPCMlIhrr/c6y7L5QXv9MRNxRlt11EbHUYMsuIvaKiEdKve6LiM8O+JtfjYj7m95fLiK+FRG/H/C5n0fEkS2q/ingL03P5wOuyMzXgJuB+SNiamAvYO+BX87MfwG3Ap+svrTMzMzac+DAzMzGNUcCR2bm1MACwJnl9Y+Wf6fNzCkz83p0hvfLwJrA/MCUwFEAEbE48CtgC2A2YBpgjgF/a0PgbGBa4DTgPWA3YEZgFWBtYIcB3/kksDywMvBt4BjgS8BcwBLA5gN/UGZehg4qnyl1/3JELAz8H7ArMBNwIXD+gMDD5sB65Te/21xmREwFXAZcDMwOLAhcXt7+XqnfMsDSwIrAPgPrVdGswKRo2e0HHFt+7/LA6sC+ETFf0+c3AH6Hlul5lPUxVJnZWO9Ll2V3RkQsC5wAfA2YAV0acN6A4MjAZfdIqe80wA+AUyNiNoCI2ATYH11mMHX5DS8DpwLrRsS05XOjgM2A37ao7pLAg03P7wHWKd9fHrgXOAA4IjNfbVHG/WidmZmZ1caBAzMzGxucU84MvxoRr6ID+lb+AywYETNm5puZecMgn90COCwzH83MN9HU8M3KAd7GwPmZeU1m/hsd9OaA71+fmedk5vuZ+XZm3pqZN2Tmu5n5ODog/diA7/w4M1/PzHvRgeEl5e+/BlwELFtpicAXgAsy89LM/A+6vn0yYNWmz/w8M59smvre7DPAc5n5s8z8V2a+kZk3Ni2XH2bmC5n5IjpQ3rJivQb6D3BQqePvUFDlyPL37gXu44MHutdk5oWZ+R5wCsNzELw98JvMvDEz38vMk4F3ULCk4QPLLjPPysxnyro+A3gIBVQAtkPr9eaUhzPzicx8Fs102aR8bl3gpcy8tUW9pgXeaHp+CApW/AW1+YmBpVCA6PSIuDoidhpQxhulHDMzs9o4cGBmZmODjTJz2saD/z2L32xbYGHggYi4OSI+M8hnZweeaHr+BDAKmKW892Tjjcx8C51FbvZk85OIWDgi/hQRz5XLFw5GB8rNnm/6/9tjeD7lIPVtWffMfL/Up3lWxJMDv9RkLnQWvW3Z5f+zV6zXQC+XIADo98Hgv7n5jhFvAZOO6TKLLs0D7DEgGDUXH/yNA9ftVk2XNryKZoc01u1gy/JkNMOC8u8pg9TrH8BUjSeZ+UpmfiEzl0YzaX4B7IwuVbgH+Djw9eZLVMr3Xx3kb5iZmXXMgQMzMxunZOZDmbk5MDPwI+DsiJiC/50tAPAMOohsmBt4Fx3YPgvM2XgjIiZD09o/8OcGPD8aeABYqFwqsTcQQ/81g/pA3SMi0AHs04PUr9mT6PKMtmWj5fJMi8++BUze9HxsSMz3JJoFMW3TY/LM/L+mz/x32UXEPOgSi52AGUrw6h5Gr9sn0WUxY3IOsFRELIFmeZw2SL3uQkGvMdkeuCEz70GXNNxSZsLcXZ43LAbcOcjfMDMz65gDB2ZmNk6JiC9FxEzlDPyr5eX3gRfLv80Hy/8H7BYR80XElGiGwBnlmvazgfUjYtWSN2B/2gcBpgJeB96MiEWBb9T0s8bkTGC9iFg7IiZCt2p8B7iu4vf/BMwWEbuWZIhTRcRK5b3/A/aJiJkiYkZ0mcapLcq5A/hiREwYEevyv5dmDLeJI2LSpseEY/jM83xwvR+LztSvFDJFRKxX8j6MSSPw9CJARHwFzThoOA7YMyKWL+UtWIINjYSFZwOnAzdl5t8H+S0XMoblFxEzAzuiNgjwGLp7wpTACsCj5XOTolwIlw7yN8zMzDrmwIGZmY1r1gXujYg30fTuzUr+gbeAg4Bry3TzlVGCvFPQdeiPAf9CU8Ep19/vjK7LfxZ4E3gBHZy3sifwRXSd+bHAGfX/PMnMB9HU918ALwHrA+uXs9BVvv8GsE753nPomv01y9sHAregM+B3A7eV18Zkl1LGqyg3wjkd/5ju3Isud2g8vjKGz+wPnFzW+6blNoZfRYkX/wE8jJJkjlFm3gf8DLgeBSGWBK5tev8s1LZOR+v+HGD6piJOLt8Z7DIFUNLET5fZLc1+inJOvFmeHwKshWY6nN90W8b1gasys9XsEDMzsyGJzMFmMZqZmRlAObv7KroM4bERro6NRSJibnQJy6yZ+Xqbzx4MvJCZRwzh79wIbFsuZzAzM6uNAwdmZmYtRMT66BaFgc44rwQsl955WkURMQFwGDB1Zm4z0vUxMzMbirqzFJuZmY1LNkTTywNN3d/MQQOrqiTlfB7dlWLdEa6OmZnZkHnGgZmZmZmZmZm15OSIZmZmZmZmZtZSTy9VmHHGGXPeeeft5Z80MzMzMzMzszZuvfXWlzJzpjG919PAwbzzzsstt9zS/oNmZmZmZmZm1jMR8USr93ypgpmZmZmZmZm15MCBmZmZmZmZmbXkwIGZ
mZmZmZmZteTAgZmZmZmZmZm15MCBmZmZmZmZmbXkwIGZmZmZmZmZteTAgZmZmZmZmZm15MCBmZmZmZmZmbXkwIGZmZmZmZmZteTAgZmZmZmZmZm15MCBmZmZmZmZmbVUKXAQEdNGxNkR8UBE3B8Rq0TE9BFxaUQ8VP6dbrgra2ZmZmZmZma9VXXGwZHAxZm5KLA0cD+wF3B5Zi4EXF6em5mZmZmZmdk4pG3gICKmAT4KHA+Qmf/OzFeBDYGTy8dOBjYaniqamZmZmZmZ2UgZVeEz8wEvAidGxNLArcAuwCyZ+Wz5zHPALGP6ckRsD2wPMPfcc3ddYTMzMzMzM7PhNuuVdwz5u8+tuUxt9egHVS5VGAUsBxydmcsC/2TAZQmZmUCO6cuZeUxmrpCZK8w000zd1tfMzMzMzMzMeqhK4OAp4KnMvLE8PxsFEp6PiNkAyr8vDE8VzczMzMzMzGyktA0cZOZzwJMRsUh5aW3gPuA8YOvy2tbAucNSQzMzMzMzMzMbMVVyHADsDJwWERMDjwJfQUGHMyNiW+AJYNPhqaKZmZmZmZmZjZRKgYPMvANYYQxvrV1rbczMzMzMzMysr1TJcWBmZmZmZmZm4ykHDszMzMzMzMysJQcOzMzMzMzMzKwlBw7MzMzMzMzMrCUHDszMzMzMzMysJQcOzMzMzMzMzKwlBw7MzMzMzMzMrCUHDszMzMzMzMysJQcOzMzMzMzMzKwlBw7MzMzMzMzMrCUHDszMzMzMzMysJQcOzMzMzMzMzKwlBw7MzMzMzMzMrCUHDszMzMzMzMysJQcOzMzMzMzMzKwlBw7MzMzMzMzMrCUHDszMzMzMzMysJQcOzMzMzMzMzKwlBw7MzMzMzMzMrCUHDszMzMzMzMysJQcOzMzMzMzMzKwlBw7MzMzMzMzMrCUHDszMzMzMzMysJQcOzMzMzMzMzKwlBw7MzMzMzMzMrCUHDszMzMzMzMysJQcOzMzMzMzMzKwlBw7MzMzMzMzMrCUHDszMzMzMzMysJQcOzMzMzMzMzKwlBw7MzMzMzMzMrCUHDszMzMzMzMysJQcOzMzMzMzMzKwlBw7MzMzMzMzMrCUHDszMzMzMzMysJQcOzMzMzMzMzKwlBw7MzMzMzMzMrCUHDszMzMzMzMysJQcOzMzMzMzMzKwlBw7MzMzMzMzMrCUHDszMzMzMzMysJQcOzMzMzMzMzKwlBw7MzMzMzMzMrKVRVT4UEY8DbwDvAe9m5goRMT1wBjAv8DiwaWb+Y3iqaWZmZmZmZmYjoZMZB2tm5jKZuUJ5vhdweWYuBFxenpuZmZmZmZnZOKSbSxU2BE4u/z8Z2Kjr2piZmZmZmZlZX6kaOEjgkoi4NSK2L6/NkpnPlv8/B8wypi9GxPYRcUtE3PLiiy92WV0zMzMzMzMz66VKOQ6Aj2Tm0xExM3BpRDzQ/GZmZkTkmL6YmccAxwCssMIKY/yMmZmZmZmZmfWnSjMOMvPp8u8LwB+BFYHnI2I2gPLvC8NVSTMzMzMzMzMbGW0DBxExRURM1fg/8AngHuA8YOvysa2Bc4erkmZmZmZmZmY2MqpcqjAL8MeIaHz+9My8OCJuBs6MiG2BJ4BNh6+aZmZmZmZmZjYS2gYOMvNRYOkxvP4ysPZwVMrMzMzMzMzM+kM3t2M0MzMzMzMzs3GcAwdmZmZmZmZm1pIDB2ZmZmZmZmbWkgMHZmZmZmZmZtaSAwdmZmZmZmZm1pIDB2ZmZmZmZmbWkgMHZmZmZmZmZtaSAwdmZmZmZmZm1pIDB2ZmZmZmZmbWkgMHZmZmZmZmZtaSAwdmZmZmZmZm1pIDB2ZmZmZmZmbWkgMHZmZmZmZmZtaSAwdmZmZmZmZm1pIDB2ZmZmZmZmbWkgMHZmZmZmZmZtaSAwdmZmZmZmZm1pIDB2ZmZmZmZmbWkgMHZmZmZmZmZtaSAwdmZmZmZmZm1pIDB2ZmZmZmZmbWkgMHZmZmZmZmZtaSAwdmZmZmZmZm1pIDB2ZmZmZmZmbWkgMHZmZmZmZmZtaSAwdmZmZmZmZm1pIDB2ZmZmZmZmbWkgMHZmZmZmZmZtaSAwdmZmZmZmZm1pIDB2ZmZmZmZmbWkgMHZmZmZmZmZtaSAwdmZmZmZmZm1pIDB2ZmZmZmZmbWkgMHZmZmZmZmZtaSAwdmZmZmZmZm1pIDB2ZmZmZmZmbWkgMHZmZmZmZmZtaSAwdmZmZmZmZm1pIDB2ZmZmZmZmbWkgMHZmZmZmZmZtaSAwdmZmZmZmZm1pIDB2ZmZmZmZmbWUuXAQURMGBG3R8SfyvP5IuLGiHg4Is6IiImHr5pmZmZmZmZmNhI6mXGwC3B/0/MfAYdn5oLAP4Bt66yYmZmZmZmZmY28SoGDiJgTWA84rjwPYC3g7PKRk4GNhqF+ZmZmZmZmZjaCqs44OAL4NvB+eT4D8GpmvluePwXMMaYvRsT2EXFLRNzy4osvdlNXMzMzMzMzM+uxtoGDiPgM8EJm3jqUP5CZx2TmCpm5wkwzzTSUIszMzMzMzMxshIyq8JnVgA0i4tPApMDUwJHAtBExqsw6mBN4eviqaWZmZmZmZmYjoe2Mg8z8bmbOmZnzApsBV2TmFsCVwMblY1sD5w5bLc3MzMzMzMxsRHRyV4WBvgPsHhEPo5wHx9dTJTMzMzMzMzPrF1UuVfivzLwKuKr8/1FgxfqrZGZmZmZmZmb9opsZB2ZmZmZmZmY2jnPgwMzMzMzMzMxacuDAzMzMzMzMzFpy4MDMzMzMzMzMWnLgwMzMzMzMzMxacuDAzMzMzMzMzFpy4MDMzMzMzMzMWnLgwMzMzMzMzMxacuDAzMzMzMzMzFpy4MDMzMzMzMzMWnLgwMzMzMzMzMxacuDAzMzMzMzMzFpy4MDMzMzMzMzMWnLgwMzMzMzMzMxacuDAzMzMzMzMzFpy4MDMzMzMzMzMWnLgwMzMzMzMzMxacuDAzMzMzMzMzFpy4MDMzMzMzMzMWnLgwMzMzMzMzMxacuDAzMzMzMzMzFpy4MDMzMzMzMzMWnLgwMzMzMzMzMxacuDAzMzMzMzMzFpy4MDMzMzMzMzMWnLgwMzMzMzMzMxacuDAzMzMzMzMzFpy4MDMzMzMzMzMWnLgwMzMzMzMzMxacuDAzMzMzMzMzFpy4MDMzMzMzMzMWnLgwMzMzMzMzMxacuDAzMzMzMzMzFpy4MDMzMzMzMzMWnLgwMzMzMzMzMxacuDAzMzMzMzMzFpy4MDMzMzMzMzMWnLgwMzMzMzMzMxacuDAzMzMzMzMzFpy4MDMzMzMzMzMWnLgwMzMzMzMzMxaGjXSFTAzMzMzMzOry+VXLDCk76291iM112Tc4RkHZmZmZmZmZtZS28BBREwaETdFxJ0RcW9E/KC8Pl9E3BgRD0fEGREx8fBX18zMzMzMzMx6qcqMg3eAtTJzaWAZYN2IWBn4EXB4Zi4I/APYdthqaWZmZmZmZmYjom3gIOXN8nSi8khgLeDs8vrJwEbDUUEzMzMzMzMzGzmVchxExIQRcQfwAnAp8Ajwama+Wz7yFDBHi+9uHxG3RMQ
tL774Yg1VNjMzMzMzM7NeqRQ4yMz3MnMZYE5gRWDRqn8gM4/JzBUyc4WZZpppaLU0MzMzMzMzsxHR0V0VMvNV4EpgFWDaiGjcznFO4Ol6q2ZmZmZmZmZmI63KXRVmiohpy/8nA9YB7kcBhI3Lx7YGzh2mOpqZmZmZmZnZCBnV/iPMBpwcEROiQMOZmfmniLgP+F1EHAjcDhw/jPU0MzMzMzMzsxHQNnCQmXcBy47h9UdRvgMzMzMzMzMzG0d1lOPAzMzMzMzMzMYvDhyYmZmZmZmZWUsOHJiZmZmZmZlZSw4cmJmZmZmZmVlLDhyYmZmZmZmZWUsOHJiZmZmZmZlZSw4cmJmZmZmZmVlLDhyYmZmZmZmZWUsOHJiZmZmZmZlZSw4cmJmZmZmZmVlLDhyYmZmZmZmZWUsOHJiZmZmZmZlZSw4cmJmZmZmZmVlLDhyYmZmZmZmZWUsOHJiZmZmZmZlZSw4cmJmZmZmZmVlLDhyYmZmZmZmZWUsOHJiZmZmZmZlZSw4cmJmZmZmZmVlLDhyYmZmZmZmZWUsOHJiZmZmZmZlZSw4cmJmZmZmZmVlLDhyYmZmZmZmZWUsOHJiZmZmZmZlZSw4cmJmZmZmZmVlLDhyYmZmZmZmZWUsOHJiZmZmZmZlZSw4cmJmZmZmZmVlLDhyYmZmZmZmZWUsOHJiZmZmZmZlZSw4cmJmZmZmZmVlLDhyYmZmZmZmZWUsOHJiZmZmZmZlZSw4cmJmZmZmZmVlLDhyYmZmZmZmZWUsOHJiZmZmZmZlZSw4cmJmZmZmZmVlLDhyYmZmZmZmZWUsOHJiZmZmZmZlZSw4cmJmZmZmZmVlLDhyYmZmZmZmZWUsOHJiZmZmZmZlZSw4cmJmZmZmZmVlLbQMHETFXRFwZEfdFxL0RsUt5ffqIuDQiHir/Tjf81TUzMzMzMzOzXqoy4+BdYI/MXBxYGdgxIhYH9gIuz8yFgMvLczMzMzMzMzMbh7QNHGTms5l5W/n/G8D9wBzAhsDJ5WMnAxsNUx3NzMzMzMzMbIR0lOMgIuYFlgVuBGbJzGfLW88Bs7T4zvYRcUtE3PLiiy92U1czMzMzMzMz67HKgYOImBL4PbBrZr7e/F5mJpBj+l5mHpOZK2TmCjPNNFNXlTUzMzMzMzOz3qoUOIiIiVDQ4LTM/EN5+fmImK28PxvwwvBU0czMzMzMzMxGSpW7KgRwPHB/Zh7W9NZ5wNbl/1sD59ZfPTMzMzMzMzMbSaMqfGY1YEvg7oi4o7y2N3AocGZEbAs8AWw6LDU0MzMzMzMzsxHTNnCQmdcA0eLtteutjpmZmZmZmZn1k47uqmBmZmZmZmZm4xcHDszMzMzMzMysJQcOzMzMzMzMzKwlBw7MzMzMzMzMrCUHDszMzMzMzMysJQcOzMzMzMzMzKwlBw7MzMzMzMzMrCUHDszMzMzMzMysJQcOzMzMzMzMzKwlBw7MzMzMzMzMrCUHDszMzMzMzMysJQcOzMzMzMzMzKwlBw7MzMzMzMzMrCUHDszMzMzMzMysJQcOzMzMzMzMzKwlBw7MzMzMzMzMrCUHDszMzMzMzMysJQcOzMzMzMzMzKwlBw7MzMzMzMzMrCUHDszMzMzMzMysJQcOzMzMzMzMzKwlBw7MzMzMzMzMrCUHDszMzMzMzMysJQcOzMzMzMzMzKwlBw7MzMzMzMzMrKVRI10BMzMzMzMz662ffeEzQ/reHmf8qeaa2NjAMw7MzMzMzMzMrCUHDszMzMzMzMysJQcOzMzMzMzMzKwlBw7MzMzMzMzMrCUHDszMzMzMzMysJQcOzMzMzMzMzKwlBw7MzMzMzMzMrCUHDszMzMzMzMysJQcOzMzMzMzMzKwlBw7MzMzMzMzMrCUHDszMzMzMzMysJQcOzMzMzMzMzKwlBw7MzMzMzMzMrCUHDszMzMzMzMysJQcOzMzMzMzMzKylUSNdATMzMzMzMxs7PbXXX4f83TkPXb3Gmthw8owDMzMzMzMzM2upbeAgIk6IiBci4p6m16aPiEsj4qHy73TDW00zMzMzMzMzGwlVZhycBKw74LW9gMszcyHg8vLczMzMzMzMzMYxbQMHmXk18MqAlzcETi7/PxnYqN5qmZmZmZmZmVk/GGqOg1ky89ny/+eAWVp9MCK2j4hbIuKWF198cYh/zszMzMzMzMxGQtfJETMzgRzk/WMyc4XMXGGmmWbq9s+ZmZmZmZmZWQ8NNXDwfETMBlD+faG+KpmZmZmZmZlZvxhq4OA8YOvy/62Bc+upjpmZmZmZmZn1kyq3Y/w/4HpgkYh4KiK2BQ4F1omIh4CPl+dmZmZmZmZmNo4Z1e4Dmbl5i7fWrrkuZmZmZmZmZtZnuk6OaGZmZmZmZmbjLgcOzMzMzMzMzKwlBw7MzMzMzMzMrCUHDszMzMzMzMyspbbJEc3MzMzMzMyG0/777z8i37VqPOPAzMzMzMzMzFpy4MDMzMzMzMzMWnLgwMzMzMzMzMxacuDAzMzMzMzMzFpyckQzMzMzMxt37D9NF999rb56mI1DPOPAzMzMzMzMzFpy4MDMzMzMzMzMWnLgwMzMzMzMzMxacuDAzMzMzMzMzFpyckQzMzMzs7HE/vvvPyLfte7cv+hiQ/7uYg/c/9////LrVwy5nB1/vdaQv2vmGQdmZmZmZmZm1pIDB2ZmZmZmZmbWkgMHZmZmZmZmZtaSAwdmZmZmZmZm1pIDB2ZmZmZmZmbWku+qYGZmNp4aanZuZ+Y2s+Ew714XDPm7jx+6Xo01MbOBPOPAzMzMzMzMzFpy4MDMzMzMzMzMWnLgwMzMzMzMzMxacuDAzMzMzMzMzFpyckQzs0EMNXkcOIHcSHKCLbMK9p+mi+++Vl89+tjPvvCZIX93jzP+VGNN+tusV94xpO89t+YytdZjOCx58pJD+t7dW99dc03MRpZnHJiZmZmZmZlZSw4cmJmZmZmZmVlLDhyYmZmZmZmZWUsOHJiZmZmZmZlZS06OaGbjpPsXXWzI313sgftrrMloQ02yNT4l2DIbVw01eRz8bwK5oSb/dOJPMzMbKs84MDMzMzMzM7OWHDgwMzMzMzMzs5YcODAzMzMzMzOzlhw4MDMzMzMzM7OWnBzRzLq3/zRdfPe1+uphHdl///1r++7lVywwpHLWXuuRDzyvM4FcbYbavt22O/bUXn8d8nfnPHT1DzwfavvuZrsYXy158pJD/u7dW99dY01G++XXrxjyd3f89Vo11mS0obbvgW27LkPtt+F/+24zG/d5xoGZmZmZmZmZteTAgZmZmZmZmZm15MCBmZmZmZmZmbXkwIGZmZmZmZmZteTkiG3Mu9cFQ/7u44euV2NNxg8/+8JnhvzdPc74U401qZ+TEFUz1CRbw5Vgqx/VmUDOxj73L7rYkL+72AP311iT0dx3j9n41Hebmdm4zTMOzMzMzMzMzKylrgIHEbFuRDwYEQ9HxF51VcrMzMzMzMzM+sOQAwcRMSHwS+BTwOLA5hGxeF0VMzMzMzMzM7OR182MgxWBhzPz0cz8N/A7YMN6qmVmZmZmZmZm/aCbwM
EcwJNNz58qr5mZmZmZmZnZOCIyc2hfjNgYWDcztyvPtwRWysydBnxue2D78nQR4MGhV7cvzQi8NA6WU2dZ/VZOnWX1Wzl1ljUu18m/rbdl9Vs5dZbVb+XUWVa/lVNnWf1WTp1l9Vs5dZbVb+XUWVa/lVNnWf1WTp1l9Vs5dZbVb+XUWVa/ldNP5snMmcb0Rje3Y3wamKvp+ZzltQ/IzGOAY7r4O30tIm7JzBXGtXL6sU7+ba5Tv5XTj3Xybxs76+TfNnbWyb9t7KyTf9vYWSf/trGzTv5t445uLlW4GVgoIuaLiImBzYDz6qmWmZmZmZmZmfWDIc84yMx3I2In4M/AhMAJmXlvbTUzMzMzMzMzsxHXzaUKZOaFwIU11WVsVddlGP1WTp1l9Vs5dZbVb+XUWda4XCf/tt6W1W/l1FlWv5VTZ1n9Vk6dZfVbOXWW1W/l1FlWv5VTZ1n9Vk6dZfVbOXWW1W/l1FlWv5VTZ1n9Vs5YYcjJEc3MzMzMzMxs3NdNjgMzMzMzMzMzG8c5cGBmZmZmZmZmLTlwYDaOioho+v+Qt/XmcsZl4/rv7KYNjA/6cf33W536rT7gOg0mIiYcpnLdlwzCy8fq0C/9yHDwNjL28oobh/X7htkP9YuISSJi1ohYvp/KGlDuUHceE0XE3BGxSGa+30UVhmU99cP6B4iIpSJiihzHEr6ELBIRW0bETI02MJTl3i/rqm4RsWpEzAPQWP/9MFhrLO9+a5N11aeOZRwRU0D3dRqOg+sa6lRL0Dcz3ytldP0b6+pL6jJc2+lQf1PZ/y8M0OX+ti819pMjXY/hUONJltq2h4iYre59Ur/sxyNi6nFxGxmoX5Z33cbJH9UrETFBREw/3H+ji6/vGxGfGFDeiA6KI2LhiNghIuZtGnyMZJ1+BfwAOD4itml+YwjLvpayImLKiFgtIj4NGoR2uowiYlngXuAK4JKI+HFEjOqwHrNExI9QO1q9k78/SJkrRcQ3I2Kuflj/EbEocAfw24hYOiIm7YM6TR8RH46IaZpe63T9Tw6sApwGrAdcHRHTwuhBbYdlHhkRn+ukDi3qtVZELNR8IDNSyzoi5gV+BswTESdFxJTw3+1tWM7UduCnEXFJo04w4m1y1ojYprn/GEKbXDMiVoDagjSnRsR1EbHcUOtU6lLnwfVMEfH5iJhjqHUq2+5MEfHxiJhhKAPsiJgjIu6MiI+DfmNETFTeG0rgcFXq60u6FhGT19GGImLOiJg4IqaKiGUjYpohLu+5gN8DP4uIMyNi4qb3htSuIuLTEbF3RKw4lO/XKSI2RPvJXUoAYUT3k431VlNZkwMr1jQuqWXMHRFLAk9HxJ4RMVU3gciImLmbvqROEbF7RBwJnBsRO5bXhtSGImKdiNgpIraIErAb4rJeumy/XamrLxkbOHDQnZ8Am46pA+tiY5gvInaOiANg6JHriPgIsCFwc3k+YSlvxM5gRcRKwDXAwcAdEbFvRIwq73V1a9Ah1mcdYFFgL2BPYKkyOD4kIibpZNnXWRZwPPAV4IyIOCwiRg1hvZ0IBPB94ErgE8CcHdbjF8DUwAzAZmVQvExEzNBhXQCIiNWAo4GVgdsbnfUIn1V9DngZWBo4DNg2Ij4E7BgREw5lkF2Dn6M28LmIWK6x/qtuIxGxFPA0ukXQ4Zm5GXABsENE3BIR20H15V4OGlZDQahGYGuKaDqorVjOAsBlwJeB9SJi7vJWLQPAIdgf+BOwI/AasFrZ3pYFsq6BaadK3/0V4M7MfDMiJoqIqfXWyNQJBVimaRpUT9B04NZ2GykD4XOBLSJiu4hYEIa+7Zdt4Q5gQeDMiDipvLVO6acq1SkinoyI7Utd3gudDOgmgHAQ8CNg64jYqHGA2zjQqlCnpVCfdD2wHXBPRGwVRQf1WBJYAC2bayNi/sz8T0TMmpnvD2F88j1q6Evq0OjfykFDDDWAUMq5F7Xti4FvARdFxCcb5XVQ5sHArcCWqO9dOSIOjoiFG4GpDuv2kVLm7MA5EbFEREwaI3DGvyynU4BngE3RuGDbiFic0fvJngUQmtbbbhEx/5i216r1adrezqZpXNLcrjqoV51j7neA+1B7uikivlT2uRt30i7LmPsPNPUl5fWeB3xKe9kS9R8HAatExELNy6eD37UE6mcXAT5ayu14WZf1/1dg+4hYe0xj2yp1qrkv6XsOHAxRRCwErA+clpn/jojZI2KFiFgZutqRngLMCKwbET+KiOnK3+t0XX0B+FVm/qM03p9GxAMRsWuXA6NuHAycBDwJ3ABsDqwJ7ALUcka7Q18CDsvMfwCLAZsArwMLAbdExMy9LisUrZ4zM7cDpgfmBX4eEb+MiA93UMbswLcy8zTgEGAm4IDy/gLRZgZB6OB5vsz8RmbuDCyPghG7AD+MiPmq1GWA7wA/yswvAr8Bdo2I+yJitxiBwFGxIvAi6ui/iQZGNwN7o4DJSAQ1TkJBn9WB7YE1I2JXqm8juwA3on7kO6GzRZsBL6Ad9rcj4ogOdmTrA8dn5qsR8RngKLT97h46a19JZj4CnA8sAXwe+FJoVs2+Uc6G9kroAPw1tI3ODSwFrAN8DLgJuBr97pGwHfBKZn6r9N3XAM+iZb5hrytT+pP5M/Pw8vxLwIkRcUxEzFwxGPk5tO5vQut/m4jYJCJmKWUu2kmdMvNdFOg7EAW1Xo6Ix9GgbaqKddoHuB34RkTcFREfycz3m2YgTDP418foCLR/mxjt27aMiIOovt52QQcNbwHzoCDi6llExIxVCsnMi4GdUP92Pjoguh8Fbjsan5TB9FvA3yJiMj7Yl+zRYV9Sh13QYH994LqI+BR8YBZLpWUEfBUFMj8LTIUC7P+HguwAs1ZZTqHZJfMBP8vMV4FPA1ujoPvNMWD2YUW7AQdl5k5oX7kn2nb2jIj5h1BeN7YALkLb1quoH1oXBZI3RmONXmqst/lQEOOLoWDhhPDBKf4V7ILa9iulvMuAXYF7y7hklg7qVduYOzP/BnwX+DoKIn8D9StfbfQFFYs6GDiuBPy2pIwhRuhkzbeBEzLzEuAqFIj6XuPNDtfbAWhZ74xO4K7TOEgvZU1WsZxG2/4nOqGxdWi251SlnLkq1qmWvmSskZl+DOEBbAP8pvz/k6jRnAKcgwYko4DosMyNgMubnl+LOuvLgY07LGtb4LfA5GiHsw6wNnAJOps5YY+X1wrAueX/+5Zldi86s/JeWWZL9KguAUwCfKT8fxQagC7e9JmjgZV6WVb57I+ATcv/vw48haKq+6MzGUtWqM+qZR0vD0xcXj8DeKT8/07gwjblLIwOnr4E7A48WH7b8sBxwBc7XOZTle1jcxTEeA4NOtYs286JI9AmJ0SXlzyEDs52Kv++VJb1P4Gle1mnprrtW9rUemgK7HtlXUzb5nuToR3gjGhHfSs6OH65vLY0cCbaqU1RsS4boYOP6dGAfbWyPR8DnNpoY+3aZfl3NXSGf340MHqs9AHzA5P2eBmvAxxeftPvUbDvT+X3XoTOaH9kBNb910r7+0RpmzegQMu9ZT1+tMf1+RU6+7Uw8
EXgz6VdHlf6p3kqlDEXMHv5/1LoQOgXaOC+N/BMh3UaVbbfXwLfK6/9Gfhbqesn2nx/KhTInL4837Us23Ob6ntoh3WasPy7HzqIWAoF2f5d2vqCbb4/GernLyrPv1p+z0vAVmXbvQCYu2I9NgP+WP6/NDroewnYpbxWeXyCAqvnAGcB1ze9PiMd9CU1tMX/9m9Ny+gBdMa40b+dUqGcSdAB/vplW/98+fds4C5gD3Rg/CFgtjZlTVDa4U9RoOfupvfWKq9XWtZo/z0pGjN8Dx3MvlTqulzZ5k6nQp9b0/IeBXwYmA3NYLkZjSvPKtvLfSgAO0OP6tNYb7OU5431dkpZPusCV3TQlr6O9oebo2DNu2h/tCHqdx+m4riEGsfcpR0sh8b+gWazPlvqc0aV7Y2mMXd5Pg0a33y2PF+eNn1JjettirKsl256bR7gkvL/1WkzJm363ixlG1i46bVvAb8u//8wsFPFtr18Y/sG1kD7umPRGGAb4MEO2uQGdNmXjC2PEa/A2PpAUdZTysZ4FBpITY2uKz4OmG4IZX6yfHcF4MfAleX1L6Fo4+odlDUFGph9DTh6wHu/pwyYeri85kAD4cnK7zkDRVIfQ9G5V4Aje1ynCZr+P+uA924GVu5lWWUHMSOjB8Wb8MEAxMGUoEKnv7O0rbvQIP1FYI0K39sMuA4NYvZsen1ndAa603psXjrTs4BrB9TvPHo0+Bjwd+dFO/yr0cH5NcCXy/sXAx/vZZ2a6rYFcEb5/3llGzkV2LbCdxdHsyUmQQOgRdBA5lHgzVLOnzqsz4/RQPaUAa9fBMzRQTnToqmTK6Hp1E+iQdUJaKZNL5fxDKUtvliWza/RAdrn0BmRvYHdelynKH33uWjQeBewVdP791AOlHtUn+Zt5BzgP8AKTe8fOZRtpPRva6JLu14Gth5i/SYp/eJmwF3ltS8217Hdsm56PjEKhr0CvA1sM8Q6rcboAMTvy7r8SZW2hPaTn0b7yQlKnS5EB/1vAn8HPtNBXb6HZh0chgIaSwB/6PD3zFCW1cdRgO8njW0VBQE76ktqapcTN/+/9E9/K8uo8npDMwI/hw7WJ0FB8oeAf6HxybXAWhXKWaJsC19As+oar29CGcd1+Ps+AvyuPK4c8N5lwFy9Xublb6+KDoRuZ/S47dcjUZemOk1U1tslKIjxlQ6/3xiPnonGO81t6XYqjktQv30EXYy50XHFZE3PNy7b2PmUPhL4ZsX6zIrGfRNTxqelTf+q/P+OTvqSmtfZqPI4G11u9ns6OBmFAr/Ny2kO4Lzy/z8Dm3dRr8aM2FcoY8GK310QBQ0m6qYvGRseI16BsfGBdqKBzg7fXTrQOZvevwb42BDKnb50PCeWDmyLpvcOBL7RYXmfQmfQ7kVTnhctndDlndatpuXWvKF/F51deRlFei+mzRmZGuuxNRpQHTSmTri8d3qvyyqfn6rp/xMPeO/GwdoVOpP/27LOD0ODmMaZ3pnR1LBsVx8U0W2c0ZkITeW+GAUM5gFuY4gH1CjqOjNwKJoWPiWwA+UsW4/W//RlO9gD7ZgnRgey9/HBs0U30+MzzmPYRnZD17pT6thyGynr5rMoSLDWgPd2Ap4vj7uBRSrWZ1T596MoEHkHOlBbo7SHtmd30GBznqbnC5bt4jLKIB/4VI+W7ySl3o2zslOgYMHL6IzOo8AjaMB/N7B2D9f9JE112qz8/dfRWaZdUSDjzV7WqdSnMWtpEZqCGOW1QbcRtA/aDh1sbjSG9/dq3uYq1OUz6JKibSmDQzTwfw9N461SxvQoaDUKmGkM7x8A3DCE5TR10/93KuXcXZ4vxCCBsbLtroLOMC/Y9HqgQfFtKGhw2iBlTIiu014WBQsmQwe0d6O+f6IOf8+c6GDqNGC/Ab/t7tIfVO5LamiHY1xGTe//ALi5QjlLoZMYHweWH8P7v0SXYnwVOHOQciZAZyxXbNp2J0cH+xejfctddHiwAExe/p0W7R8PRrMs5kf7yj/3aHl/Gp19PQvNgJsYndi4sqz37wF39KIuFdfbfsBtHbSl7Ut7WgTtW2dC/dTHyjJ/nDbjEkaPrxp95GYo0H8XHY65UYD28rK9N/a7M6H95F1DXGYx4Pkc6OTkboP1JTWvt9XR/nT7gdtC6Uv+CZxfsazpUZ82EQP6bnSC6yLg7Ip12hgFef5n+6zatpva5LqlLxi4vCv1JWPbo9HobYgiYl100DgNOhCZC9gkMz/WQRmTZ+Zb5f8zo0HQgmga3ckoGn46sFlm3jtIOTOgs7r/QhvWucAbaIOdv5QzJTpj9XBnv3RoyjViJ5f6zAncmJlblaQtFwH/QAeqy2bmQz2ozxJoR/hztGzWR8vmR5l5dihh2w+BvTPzmR6W9T20HD6MAgQHZuZLTe/viy7l+MIgZVyEBjGPozb0EGqXx2fmHyPiLDQzZqHMfLpFGT9BgYJVUBT4r2ia1SroGrlZgb9m5vcH+z1N5c2MBjuToGnNp2bmaxGxBTpgewYN4rfIzAerlNmtiDgT7UAnBaZDZ3S2Le/NlJkvRsT3gcVS1wb2ok7fRMt2PuDxzPxu0zby7cw8rs33ZwD+iAZBT6Ppli+gIMI86IzKL9GB3p8z87425U2amf8a8NrMqP3MgQJAL6GEaY8OUs68aCrzlo0+p1zv+StgxcxcdrB61C0ifoz6wAfQ9Nt5UWDm0IjYEx10TYICbLdn5sE9qtc86KzSRWj7uxPNDNkZTXeeHM1oOyMz9+xRnb7J6GX0YGbuP+D9xuVlY9xGSn6eG9FskjfQIPFV4OuZeWf5zPrA05l5W4X6LIX2g+eV8hZG6+qXwGuZeU+FMmYoZUyPZrvcgWZxXF76pYnR/urnmXl9u/JKmd9DwdAV0fb1QzQOOB5tH6dUqNOf0NnGF9AB2pXoUonnyvW6j6K+fOlW+8mIOAKNG6YGbkGBkT+iZfRKZt4UStT7TsXfdSrqS85H44qjy288G01bnx14rF1fUoem/i3RLMV/ov3b78oyGoUSk52emTe2KedidKncHChI9zQKYD5ecjXsj37vdMCqgyzvn6D+dl40Y+3bTeO4n6PZObdm5ukVf+OsKDg4ERrDHZ6Zf4+INdAZyylQO9sudQ38sAndLewxtI38E+1DHkDb7gMRMROazXZv1e2ky/o01tsTpU5zovHEkU3r7bvAZZl5U4WyLkQH9ZejgPFDKOCzLJr2Plv5+EaDjUsiYuLM/PeA1+ZC09xnRP12pTF32d4eyswflDwdM6J9wQvAA5n5r4iYKDP/06ac/9l3l9cDBbv+ig54h33MXdrR1ag/exEFfV5Ey6PRjm4BNszMO9qU1ei7Z0Dt4E7Ud1+ZyiuxExqLL5+Zt1es0wvokpL/1ql8ZjsUOLilTX0uLt9dBO1L/s4H2+T30QyGQfuSsc5IRy7Gtgejz1Q1gi4ToEbxJTQFd0c6vFa/fO8atCE3XpsRTZ09D+24969Qzu/RdLmd0dmA69GZqonRxjYpTWe0e7S8LkJTfvZFZ8PfQmfNtijvbwV8p4f12Rs4qvx/4rL+NkYDt63L
65UuM6mrLNTpPIw688XQQd6dwI7l/WnQWbZZBiljDTQA+mF5fgeKVF+JdvgbltfnHaSMeYEnyv9nQYGwE1D+ibnK61NSouEVl1HjgHU3dLD42ab3VkWX5czfw/X/UbSzeRCdvTwSDRyeRTv7ydDAex8GXHIyjHVarKk+Hyvb+1N0cBYezVT6RdnGvlza4aOMPgC9u9EGKpZ3GbrG88NjeG/y5n/blHMSo69BnxtF5venzGgpr3d0JrTL5Xxd0/I+uGyrD6OZGBs3fXZSephzAx0UvVTawKfL8n8WzRqaAh38zdirOo2hTf4JBfkaM0TabiMof8EZA177bmmXO9B5DqBDGN2/TYuCbF8vbexj5fUJ2pRxFPCT8v810EHC4TTN5qODmW+Mue++gw4uKSvb7t/Qge/26MD8VTTLYKnymW8yyH4SHcT/vfx/EhTo3RENpDvKR1PKmJcP5jK4Dx1YHYdmMPZ6JtYR6EAaFGjeGE0pP4TR17vPXqGcQ9F+6LTyWw4py/mPaCwyW2lb30PJCQdb7/c0bQunlmU9aPtrU7fTS1v8TKnX15veWxz1n73aJx0HvFT+PynKB3IvGqdugPaTPcmz0Lzeyv/nQGd3DyzLfbny+rQdtKV7yzpeEvUhZ6Hx8odQ0O+TDDJWairrfHQgvvAY3psNBYGmqVDOtKUNLlye/x7N9jmh/I22l141lfU/+26a+lp0ILt3j9bbvsDJTe1oenQwfTWjx6SVLgmmTd9dlnXbSxTa1GmD8nqVvE2NvuQklMdkedQnnYUCjzOU9boPg/QlY+NjxCswNj0YhkEbOivwAxS1ehhlHQWdeZ4OTT2chvYDogVpml6JDspORQdtO47Q8poHDRJ/W57PVza081DE/wdlY+/lAH1xNABqvrRkQnQm9Xhgyl6XVTqZPwx47SPooO/kUuagiePQVKgfoJ3P9mgwNHHpYG9Fswb+e61bizKWQzudmQf8xmPRznHmKr9nwG+4ten5ZqUeszS91tHBQw3r/yul/f2W0YmorkFZi29A17YvRo+SfZU67dDYRppe+3hZb/uXbaTd9r9V2XltUdrB4qUtHFN2XFsBe1WszzJoquXBpU/6NU0DVrSjbbveSns7Es2UAgWxflrq9CpDvIa8i+X8C+B9FBA7r6z3LdAlZ/eh4Mr+9CiQ0VSvj6CD8lPK86MYnRT3eXRA2es6jalNrlW2kR+U54NuI6i/P5YBCaHQQe2v2vVpYyhvzdKmm4NOM6JrSY+lTUCzbO/foikIX7atz6AzkLsOYTmNqe/+KDowOQkFfQbdVlBQ9Zmm5wugwed9qN9tnKwYrO+eBuWgaD5YmLHU7zI0e6qT3zV5Wdb7oQO0e5re+wqwb4/b41bAT5ueT4gG6scA+3RQzrpl2V7X1Cb+BtyPZotcxOhgTctxCTrobM5lMDu6hrmRAPSzVAhkNH1/ReCWAW39JkYfSE7arh3VvLz3R7NDG8uosZ/cB10G+YNO21SX9VkXzQKiqU6zl/VwcIdlfQXtW5v3S6eU7e1tNGO46jp7AB28voauix/V9P5Unawz1DftgWY9NMbMs5Vl/vWKZSzD4PvuaRrLr0frbXlKwKfptQlQ8OKIDspp13fvXlOdDuuwTZ6F+vpRpY4nlP7kbhRInLV8tqeJv4f74dsxVhQRSwPXhu7x/d/bIzb9O+1Qyk3dVupn6HrI1YHXI+IxNMVzqsy8ITNfy/a3l3oD+E9EbF6eN7KonwJsFRHLDKV+XXoeTSlauCy3aVGAYwfUSS9XXqty66y6PFAet0TEd0H3787MC1D0eakRKOsc4MWI2KDxQmZeg5bPO+i2iP8z9axZZh6LduZ/RwfBZ2XmvzPzKnSQvCi6FrXlsk5NF74B3SZxuYgYlZn3ZeZXUefYybIBBb7OBRr3ff8daqfLlNd+jgJkPZOZJ6LrqhPN8PkXGhD9Ax3kLgq8nJn/7GG1/gBko28p9bwMBVpmR2dS2m0jV6LZG+uinekFaGbPfqhfmQ4d0FTxPjo43Bsd5E0K/LVM5Qctp7XbFZKawvknYIXQ7T3vycw9M3N7NM11oYr1qUXq1k3fQjOF1kcZ50/LzO+gA/WT0dmsaXpZL3Sg+Bowf9km5kfXhH4LHTwsOQJ1GlObvAJdrjR76DaM7baRZ9F+6PKI+GxTOdejvmTNDut0FerfLoiIbUtZL2XmYSjYt9hgX06N4M4DVo+I3SNi/sz8T2b+CQUfVi/TWDtxDv/bd1+Nft87VLsN1wXApBFxdRlnTI3W9xbozPbGjaIH+W2vAZcCP4qIr0XEFGXZ/B8aS3ym6g8qffVb6Izah9F6/EPTR95D/UIvXYn6kV9FxNJlP3srpX+L6reFvR31O5NFxEdQP/Ak2tf+G/3WlctnB1tvx6Cz8kTEhKlLER9H28ZipV7PdvD7pkSzIBrL/0oU/G/sH49BgbteOQJ4JiIeANZt2k8+jILan8vM+3tYn9uBRSPiktBtU7Ms88OAVct2U9Vl6EDvOxHxObRf2hIt37upvpwnA36cmbuh7XRW4PmI2KW8/wMUXKjqApRc72h0IpHMbMyGrNpXttt3Hx0Ra1Xok+ryCDB96LaUjdumvp+6fGeFiKg0JqnQd6/WQd89WJ1WjDa3Km9yOxovzo2O3/ZCbWA11PcvxOhbaPdqeffGSEcuxpYHir5fh65jG9P7+6HreDstdxSjb+ezd3ntz+iaq3voIAkdOtN9LpoqeS4luyyKHm/Vad1qWm4fRTubd1FU/8vl9b3QdOyeZK1Hsx9WZvS17Yuha68fQR3PkTRl+u9VWU1lfhIdJPwEXSYwdXn9DmD9Qb43ERpgrlieT4Ou2ToeDba2QcGbQW8rw+hLbxZAZ4V/gqZJr1Rev4XOst42kitOP+D13dAU5lVRroRetcPGLY3mLst3B7RDPhQNFP6EBtgP9qpNDqjf9o36DHj91nb1QWcHp0eD32+hQeYf0LWpN6Cz2XfQQZIuBkzVL+vrT2g6/Z0dlDMTisr/He34G+3ic8BVPVy+zb9lUjRT6L/LG2Xk3rzK8h6m+i2Kzli+i/KANF7fDQVgx5o2Wbavecu21sjG/zCaQbEZmhZ6ewf1mJ5y+V+L/m0fms7WDlJOI4HZ6mgq+IEoEDI9GmQ/NMTlNKS+u3ymcdnPJmh23qtov78HOlh4kaY72lSoy/po//MbdMnSxOhM+tYVvz8pmvHSPCtsWnQm/gy0P7+zk76khnbYWEYLMLp/2wfth9cE/lZl3TP6bOtkjL5LyP+V9fc5yjX8lGnMFcpsJLBrzAj5ZmkD5zOE8RZlBk9TeZujfBkfYQh3Zhjisp4ajWca/fRn0X7xYrTfvhtdbrpmj+rTar0djwKqn0PX/1duS4xOanknCq7fj05mrIn2lVd1UN6kfPCOWmuUvuAtlP+hbRtCAf9GG58KHUu8iQJJq6EZBJUT4pbl1PW+u4Z21Dxz9bNoLHIxuvRtB5pmMVVpB+XfIffdddVpDG1yPzS+eZ3Rd2V6AO27j+jF8u71w8kRK4iIz6Op4DuhQfA96D6hrzV
9pnLSoRZ/Yyp0b+s70XScpSJia+D+HCTZS0kK9Gk04LwOdYqroiysfy+fuRtN5bl0qPXrVElkNQuK4j+IOsdvoIRWp6Od0OOZuUwP6tKcWOnv6IDqjlKP5dA9W28Gbso2yUtqLmt1lOxoOnR93d/RFLNl0eB8HtRhfnqQMg5Dy/U1lMzoQjRV6naUffYQtBPbIismsSoJa76AZofMVer498zcouL3Jyn12DtLoqpyFuX9cmbgN2gH/s3UjIhhV5bTMpRLetAMiuvRzmJJdID7GtoB9eTMzhi2kffRoPhDqD3Nhy6DaVmfiDgUTbF7C+3Q7kPtcXK0A/sHmu58cWZe2KY+k6Kg0Q/QrIv3G+ut6TP/RAdDVwxSTqADRzLzsfLal1EA8yZ0LeGXgW+lzqz1RFnes6HEY83LexW0/P5Km+Vdc31GoevQp0Rn8x5EAYTd0KUmp6Nt++nsURLJmtrkBWhA/Tg60D81M8+JiG0YfQvOm1KzqtrVpzmZ4VPoLGxz/7Y1uvzp0sH6tzIjbCmUDOvX5TcuiWZ3bIS2mVMy8w8tihhYXh1994/QQetT6FKLN1B295nRoPZqdCCyXWZeMkg5kWUgF0ru+KHyW7dA6++BzNy14u86DA2uv1T68f9QrmdH1wbPgRJJHlOlvG6V/m0V1BbPQ9vpFOiAajO0jH6fmee2KedX6OzvFGgG3m0oJ9Dn0eyna1DwcC/g+5l5+RDqOj8av91ctQ8pM1ZXQOOIUcDDWWbylFkU56H+YZte7Csj4i5Gz7q6Ha3za1FAZQ104HZvZvZkxklZbx9GY+7b0AHZs3xwvf05NcuzXVljakvrobu+jCp/YwZ0d5aW+6XSb6+Hto1rMvP18tp7Tdvhm2g/Oej+LSIORycuTkSXhlxfXp+31OvfaPs9s93vayozMjMbs6Eb++8q++66RMRp6Iz7fWjW2aWMbkeNYMjVqZmV7cqqpe+uq05NbfIZNI68ErXJHdEJxb+ik797o7vRdNyX9DsHDiqIiKvRdXRXl4OqQ9AZ5RPL+x8YXFcsc1m0g/8b2lm8EhHfQtfbbp9tsqg3lXMYGmzOhnagBw54fxd0zd62ndSvG6Fs3Luijv4ZdPB5Kzoo+RAKGkwD7JKZj/egPkeg2U67RcSSjL79znuZuddIlBUfzOz6Mrqly/3At1HU+UPooO/xHOSODBFxOoqeH44GoZOgQftTqbtXLImun315kDImzMz3yv+bB6FzoJ3rjMDzmfl6xd/2IxR5fQ0FrC5sChyMQoGVZwcbVNcplOX4etS5P40GwfOjHdF30XWNW6A2el2P2mRzluiB28jiaEf2CHBfq/pExMFo0Dk3ymeyONqhTowGIkehdvl2xTo1HzRMiIJjczYFID+GBh6DZvWPiENQgHB1lBxv56b3tim/95+Z+dcq9epWCWRMz5iX92/Q2d0A/sIgy3sY6vUrFPRJNMPsadRn/gSty4PRQcOXxqI2uS+6BOhzoctT5kNByH+ivu3NTvaVEXEU8HZmfisi1kTtfVa0zzy6YhkHoYHnfmiQOD+aFnxmZj5TfveEmflCxfK67rvLttuYZbA0mllwJjoovy4iGrfkXLLVQX9zX93qvfLb/lFlmZff9SeUJPSZiNgfrfe30N0TfhBjyCI/XMoyWgydyd8azTh4lA8uo/fb9W9l/S+C+v/l0cyXSdGB8TXAi5n5dum35s9B7lxUoc77Axdk5s0VP/9TRs/MeRlNcf5T4yRPRJyNxhubDLVOVUXEL9F+cAHUpvdGy+zyzPxRCSQtg9r18z2oT2O9fQUFChZA45vbM/P/ImKyDvZtB6PL+E5BwaOBbenrqM97NXWp0WBlNY+5zxm4XUTEhsCnM/NrFeq1BpqBdRU6I35d+Y0vZualg23jA8rZFng3M09ueu2/xyVl371hZu7erqxuRcQPUf6LTSJiHbSsl0bt5kfR2Z1daum766pTU5t8Cs0EWhLtI3/R3CYj4oDy9zYepLixV/bBtId+fTA6Kcxny/OJyr+fRQOF7YZY7uLo4OksdGZ2z6b3Vmr++23KmQ8N4kBT1s9BZy4OZ3QUdTqa7i/dg2W2GDqj9xCaarUp2lkfis4czYvOYLTNyF5jnVolVjoWnV3oeVmMObPrD1G0stI0wPK3v1va0ndRtPMAdCD8KBUSWNGUdI2mO4ZQpt/RYZZotENtJFXaAh28NqZgNgKVc9BhosUu1//caDbGaU1/fz20s36RHmcIL3UYU5bo/24jFb4/Cl3e9BN0WcIoNJviCBQQuRIdBExRZR2W9ncdo5N77Y+mJh+Pbjc7c9lu2yXpXARNZx2FZnGcQw/vmjKG+kxQ6v7jpuU9FzoArby8h6Fe86Gpo79CfffFaCbTTWhq8CLlMzOMLW2yfG8Tyl00yvOJS7s8ENitw/qMKSHWxGg6/oXAHhXKmATdpeDjTa8thaaoX0cH2cqbvt9V3122jdOAR8vzCdHB7C0oMPPj8pmpGCQBZVk332TAOIHRfXfHCTXRNeMfQWfmrkQD40+iQGTlu7zU0BYb/VvznU4a7egSyl00Krah/dCdpq5DOWNmR5dyPlnKOgiNk2ZhwKV1A8pamQF3SSr9S2N5V77bUPn83JRp9qUd7Yauv/855VKQUqee9AGlDf6p6fkW6GzucWjcs2QP139jvW3T9Nrs6FKFs2hzCdAY2tLpKHh5Rtn2tuy0LZWyxjTmPqZsN9uV9TjodttU1oTls8eiy8I2RQl8G0HWSgma0Zj71abftvaA96PUdZIerbtvoJOCjefTopxIv6GDS3iose+uo05NbXJv4LamZX8jOh48gdL/0+N9d68fTo44uAky81+Z+cfyPAHK828Cm0bEhuWMVicOAI5PRZH3ATaPiCVK2Tc2TTFqF2mcF3g0dJnDR9AZvtPQdK710XXv/0BTIHtlbXTm7q9oSvgX0U7nAjSt5yfAZFnud9wjVzLmxEr7osQ6845AWReiHQSljb2CDtaORlPw2krNEvgDOkvxaXTAODU6+LgE2DIGSRhTzgi+UqLtZOZ7JUqdqdkB0wPfjYiJKv4m0ADtrPL/a9DO/tKImDczM5Rw8WlK8p9eSJ0xvxJYPiI+g87sXIja5kPoVlO9dhW6vIiyPG5DA8Z7UWR9UKmkqpei6bcToYHHCmigeSkKHD2FBrptzzaW9ncDStC3ELrm70A0GJkF3fLqbdTWBrMVSjr4bma+iC572DBK8tiI+Gjpr4ZdaNr9+eh3bIgOgsjMJ9FZglPRGYwdelGfAeZFB4mB+u4Po2vTz0aDye+lLvN4pYd1uoou2mRxH7BFRBwbSqD479T9y08B1g8ljquk7P8GJsT6d2aej3LJrDpY/1bKeAcdJG4dEYuU2VV3ZebmqO/cqGp9mnTVd5dtdyKAiFgL9d0roJkv16Jr21fMzDfQ2f7/UWaSbYmCO/MPKP/90H3gzw5dftSJm1Dg4kDg7My8OzP/jNrC5zosa8ia+rcdI2Ktckbw4czcB20jW5XZa+3KSbRMv4pmwH0IBVOnRidYpkCBxJVTZ9H/MaZyIm
I5lMT6AwlBU0nV3o+Imel8eU8FPBERc6WSDx5fyr8f2DciZit1GvY+oIw5zwCWiIh9ImK6zDwN3f3qDDQN+8whjHWHpGm97RoRW0XElJn5TGYeX+qzTURMU7Gsd1HOgNfRNn8LmsEwFRqjbBUR60fErBWKm5f/HXOfji5/WB/Ntmq53Q6o13vlsyehs9NnoksM70f73LYzFoq1y+/aE+3Dd46IIyNi0fL+eujk4ZAvpe7QbWi7bbSjV1PT9U8HvhQRc1YppOa+u+s6NbXJ3YCHI2JKdEnaEiiAtBpwaEQsnpmP5SCzfMd6Ix256OcHiv4dC8zU9Np/b42Gphh1NOsA7aT+SNMtqtDg7ODy/6VQxtqq5f0YRYUvRdeMN17fjwFJrXq43D6EDlwvQxHig9DB5CloNsJSPaxL14mVhqmsaVGCtgcYcCYH7cxWr1DGxOiMx+rlO1ehA/LFUCbux9t8f010gPl4eXy8vL5803qsdI/d8vkJ0Y5z4OuHlfY45Htc19AO5kM75MfRAH+98vot5fWetcnyd2cp28glNM14QGf1r6pSn7Lud0E71wtKe9ofTaG8BwXvKp8lRFOjr6MMFJte3xk4tmIZjbt3wOgZJueifm/Z0lf16lZQfy7LZ1aUVf41NID8Znl/OnTAXml5D0P9jkKD2ZdpSqKEzl7/fWxqk83rFF3adFBpk7uj4Mgi6LK8yjPNqCmZIToLeAQKzKxMSfxX2uqtdHhmnhr67vLZXdHB6yHADuW12VAyzEFvMVi2qV3R7ejup/TZAz6z4BDbwZroIPZFYNvy2hXokplebyO7lGW0GaMTZDb6t7b3Wm8qZ0MURHwcBZF3LO20kcRs0P6ttOVtyv8XQdPeDwY+2s3yLtvJSaVtnkGZrYPGnh/ttLwalvcqaCbL0Siw/it0qcAbjbbQ4/psiC6V+g7lTHrTeuvoDDoaqx2OgtublmV/PgoWPUnF/RLDMOYuy3xLlER2DpRP4n/GUoN8v9GnTY6CiY0x90md/LYa19uy5Tf9ipJUu2y3D6ITh1XLqa3vrrFOX0JBxO+U5fyZpjZ5GCUB/Lj8cI6DFiJiRbRzPhd1pqdm5k+a3m9cQzgqFdHspOx5gReynHUvZ2F+mLr+5jLghNStQQYro/H3p0MDqYXRQfpPM/OGiPhzqfMpndStG+UauEkz87WImAzteNZHO52L0ZS3f2Tm7D2qTy2JleosKyKmRsvohfL8s6gDehUNID+EBpFLtClnYMKY1dD0tmdQIOMOqiWM+SKaarUUOpCZFOXv2GCw71VRIsTvRcSHUe6OWzLz21Wv26tDKFnYZKVNzowGD2ujM06NM0xzZeaigxQzXPVp3kZeRjvIhVAQsWV9yvWKC6H6X1DKWAsdGDfazVtoJ7vyGAtpXfaaaMe4AbBXZh4fEVegPunUimVMmpn/alr/B5a6romuCf1tJ3UainIm+kQ0ULivaXkfhQaj16Mph22X9zDUbWJ0MD4BGrh8FQ1kb0UHgr9F665X/WQdbfKbaLu6FQ2gnkCD2K+jPu1e4PrMPKJinbpOiBXKIzQzCs7cgJIbr4uubZ8JTX3/c2b+uGKduu67W2y7H0Vnmhvfmws4IHXGd0xlbIgCe58oz/dGy+W7mfliDCHn0oDyGzlgVkeD4n+j5Lh7DLXMDv9+lWV0WWb+qE056zN6n30ZChKuj5JHL4QCLqeiSzFa9m8RsRKaHr01Cqyeh2ZzvYBmixyYo2elVv2NjfHbbGjW2MooqeIfyvt/BY7KzDM6KXcoImJLdKeSW9GMk8mBOdE+ZR3UR/0tMxcZ7rqU+oxpvS1R6rQqygVzVbv1X8r6GBobP8f/tqVl0BhuaZRX7OQWxTTKGrYxd0R8GgWLjsvMQzr43hi39VLHRVBwY6d2v60ObdrRamgfcE3zcdQgZdXSd9dVp6ptspR7Uuo2uOMsBw5aKIGDtTLz0NL57I4a7E8y848R8R3gD9kmc36LspsT0DWmgR+LpgHPlJlDmhIYEbuhBj0b8Epmfn4o5QxVDJ4B9ytosLxNVsiAW0NdakmsNAxldZ3ZNVonjLk5M68NJYwZlRUSGIUSxeyemZ+KiIVLXSZGs2K2Lb+r604iIlYAvp6Z23VbVgd/cwJ0oDiwTb6OdkIbo4DPhb1ok6VOXWWJDl1Wsj06O7U12mHtW8qbD103+x80sPlbZt7WYf2GfNAwyCBmBbTzvzIz1+mkPt0IJWLcA+X8uJvRy/ujaGc/HR1k5a6xXmNqAx8HPoXW4fPoesyxpU3ujIKn+6FlOzmaWXZNZv65TJl9Mku2+Ar16TohVihZ2DfQmfI10YHCV1FeibXQvujdzPxdlTqVMrvquytuu+ugmR2D3bViE5Qc8vZQEtM50FnQt9AMyCEFDaIpSe6A16dDSeOGfbBYcRm9le2z1e+I7tzyG3Qm90U0LfkOdHbw86jNv0yb/i0iNkXtsXEHhNezJD0rfcwcmXnAkH7wmP/eLsBGmblmXWUO8rd2RrkMfoqCKv9GB2c3ZuatoUs0DkVBqVt7UJ9W6+1mNFtsCXTy6Z4KZVVpS6ujmQwfHmJ9hzzmHri/jIilMvOuMb03xLp9GQUYh/TbOvxbVdrRczlIou+msmrpu+uqU9U2iYJ/m2fm2u1+41gv+2DaQ78+aEp2gy5R2BolkLkP7bjr+BuN4M2P0a2T/mfK4SDfGYWmgTaez4Ouu1qRHiafK3/7IEZfn/tllMfhx2hDotTrQ71ab9SQWGkYyvohcFb5/zpowHk0JXkcFabeoYQxfwA+0fRac8KYFSuUsRQ6cG5M/fwWun71THRd3QSUS2fqaN8MMdFiDX/7UOD3TW3yoEabLOu18vS0murTbhsZtD5lvfwFHaTMgC4N+Rq6Vv44RiehrG05o4Prrqc5ost6Ok5EN8S/tXPpA3+Kpia/AJzctLxPKttQT9f/gDYwzYA2sDOjr5ntZeLQrtpk+cyPgM83PV+m9CnHUi5b6aA+XSfEKtvJ7Xww0fCeKCBzEENLGthV39287Ta99oFtt2xr2wDLDFLO7mh2yiQDXp8RTbs+kkES/I2hvJlQsGfSptdGMcSEf122xbbLqGI5gQLfazS9tikK9JxEU4JD2vRv6DKG09DByurostIlmt7fGZ1lrnM5rAQs3qNlfhw6qBpVnjcSxp3A6GnhM/WoLmNab5uMab3V1ZbQQX/H41KGOOYu2/mHaLrMpmxvjcTUlffdTXWYsMX7Q/ptXbSjdZuer41miv23HXWw3mrpu+uoU4s2+YWBbRJdDvktlAtq2Jf3SD9GvAJj2wMNsN5obpAVvjMw6/H/bOjorMGOFcpqZAkf2PFM0KrsHiyT6SgZcJs6s+YMuBuOQJ02QtcyrkXTIAtFoI+ngwFRXWXRZWZXNPXsOBS8uhOdNZ2waZnvh67Zb3ntJ4qM34AOqO9GMykWLJ317UNc1gsP7ND5YMCgp20SXf92HJo6dj2w2YA22cgJ0JMsw03LY8hZopvW8VHorNkxwN7ltVFoyu026ADiewPXRwfrreMs4WiHufHAdd1U5
wmq1Kem5TwLOmM+f1OftBaaYng52uG/BuzcyzZZ6jYRusb2qwPawHaMQD/ZbZts+s5maNr3Jwf81v1Q0qhKB3xN390S5cNZZEB72hNNC69Sxo8pd0Nqem06dBnIJzupT/nukPvupu3gWJQsdPKm9yZq2nYH7SdL236CcocLRt9WbuHyfBEUdJmx4m9aFs2YOBPdgWVgXzAzuh542PvJpmX0fWCvAcvov/1bB+XtgS5VWp3RuTImRAcNBzTKrbC8H0Enis5Gs7imaCyP0p5uB5buoF7bAh8e8NqEdHEnjC6W+eLoMsWn0KWkjRwOE6EAyenocq5eBo/2KOtongHL57/rrYO29PNS/zG2paq/C826WnSQdVZpfIMCoFeUPujIMbzfyb57TMcBzXXq2Torf297NN5arum1Rjs6seoyKt+rpe+uq06lTZ6OgliTjqlNUsPJlbHpMeIVGNseaOrLHzr8zhToftzNDTgYfXvHSrdLLB3PBeig4SaabuNY3u/Zjr7pby6GZkociQ5EtwKmbHr/y2haf0+i1gPqtgs1JFaqqyx0JuFv6Dr75rMeH0OzF+Zs8/1L0G0Xt0SBg8v4YMKY5dAgp+WOBwU69in//x46wD4C5UdoJAwc9JZ7A8pbHiXhG+PfRGe0et0mLyud/ddQzoeX0MB42aY2eTkwTY/b49poOvPAbWRjFNVuW5+yrO9FZztPQmc61kezoc5Hg8G2CS3rWm+lT3qx9En/c3YazYz4ZafbWxfL+Hjg2+X/26IzFVegM4e7l23km1WXd431WrK0wQtRlvQvD2gD2zIC/WQdbbJ8fjuUMOprNA2yUSBz2g7r1HVCLHTL5BspMyeaXt8MHTh0mhCx2757ORTwOxcNaEcx+gBnVZTI8zwG6XvL904s//8wCsqcVtrVTuX1ymeqy/caSRl/jsY2v0P3oAedJOl1gs7lyu8a0zI6v11/1FTO6ijIciM66G+cGZwD9f1z06Z/QwcGjeX6S5pu/4kOHHamg5l5ZZ3dCsza4v1ZSvvoyUwodFD1LdRn/g5dpnQ12qdMUuq6Vo/X/4zorPCe6LKgqQest0rJVdH+7bY229uv27WnpnU2xjPUZZ1VGt+gvn83FOD7DZr1eQhl1ioKAlbZd7c7Duj5mKv83f1QYPQT6PIdSju6lqZ9S4Vyauu766gTulziOdR/X0GZ0dXUJuelh2Ocfng4x0GHyjWFU2Tm6x1851SUQGk6dHb3u5l5RdP73wceyTbJx0rylQvRNVsLoAPZqdBMhSvKtYhzZblOqhdCiR3PR9cPTYYOaG8ASOWHWBh10otmD24HU1dipbrLaipzWTTVaWp0DfDpJUHSVWiK6hjzJETEemiq1nfQNfoTo53Pk6iTnQsNhi7KFgljQrecOQNYJzPfiogH0BTge1Hil1GZuUvV31LKPBfd+/nYiJgHnbFcEiVCvK3XbTKUePSYHJ047Eso+/gr6EzzFpRlj84U9eoWRY36bYgGtS+iZXR56NZpg9YnIiZHM1++is5+Ho4yL9+OMvCvjpJablqxHrWst4j4A2p/86MB52aZ+WBTIqnpSzl3VqlXN0K307oPHVztERHnoGuYZ0fB279n5peqLO9hqNuf0KDjz2jdvYa24Ssz84iImK+835N+ckDdhtomF0Xr/E7UR34K9ZfToQH1u8BrmfnVivXoOiFWyRn0bml7q6DA6BsokPgCOvB/ITO/VaVOY6jfUPruyVEQ5g9o+vQx6ETCY2jbWRL1T79I3bqy1d+fC9g3M7ePiCNRwOcSNHNoc5ThvdJtjiNidjQDYvPMfD0iHkNn1Z5HZ/x/nBUTWdahLKPJ0b51HrSNjEIB0nfQQcMr7fZPJf/FzCgQ8lcUbFoZBUnmROOlh9FvnKdV/xYRC6DlsVpmvhsRa6PkdWdn5n5Nn6ucHDsiTkQJQo+JiJXRNrcx8LPUrfiIiAUHawN1Kdd3/wYF+v6GgiALoinXC5bXns3M7Ye7LqU+jfX2Ejqp9n3Uh1yOAokfRn3JNyqUNTmaUXk+6lOPRv3/rWhm1IdRO/tVhf3b6aiPPrZs/8ui3D/HZeaFodsyzlehnPmBozPzk+X5w2j229Mo38YhjTZQ4ff1zXFA6ZMWQkHvl1A7mg+dsJmxPF6v0o7q6rvrqlNTmzwCtcN5UADir+hyvNdRe/o2MH8vxjj9ou19cO2DUsmDOgkabIIiU9ujM/OnA+dFxAVoQ38J+FHqPr6DlTMdSjh1fWY+hyJgK0bE1sB+EfFOZl5Li/sQD6MF0cDxXjRoPxAl+nollLH/aeDXPQoaNCfD2R2dWdwXncGonFhpGMoamNn1UjSI+WRE/BAtu+NaDTyLVdBUybXR1Os30QHj6ahzXQs4b7BBdWY+FRGfKkGDqYCTs2TwjYhLgf+LiAUy85F2v6l8Zx10hvub5aWfozb6LLpn+0GZeQM9bJOZ+XhEzBIRN6Adz7/R9LSb0EyNB9C09RN7eNA4MCNvoMDTJhFxONpGWtYndKeDvVGSv4nRAdpJKFD3UHn9aXQQUqU+H0fXWe5cXhrSeouIzVH0/Ufl+SFogP+DsvOP1D3uh/0+5ACZ+VwZ3J0aEW+hKbhrooHiCuh+3Q+gA7Zerv+Po33AjKUeS6C+8l10v/Zd0QCkJ/1kqVO3bXJ9FMi8As1cugid2b0JtcvV0AHaxRXr0yoh1qmMToh1ZQ6SECsiVkMzw/4dEW+g5LUfiojtUO6G+9FByN5V6lTK7KrvLtvuj9DMvKnQAcwb5ffNh66PvhVNE//PIPUIFNiZKCJuQ/v/fcsJjCcjYh+0Pi+v+NNeR/k/fh0R7wEvZ+b3yt+6BvhqREw0WJ3q0tS/PY0CRJehuxWsh6aJv14ee7YpZx2Ui+IlFCy4E82keRotm9dRv3dMZr6GAi+tPIESFL5b+rHLy1ju4IjYKsudYaoGDYp7UdsBXXJ2FAoiHRQRy2XmXj0KGnwabbv/QQdBxwE/QW1zRTTueQP4RiMAPMz1aay3e9B6uhhtx8sw+g5ID6DbX7Yra02U22YhdGZ5ErTeJ0Tb2idQIHLQ7a2UNREaY0V56Vh0CddfgGMi4ieZ2QjgDSozH42I/0TECeg44JVGEKTsk1aMiLPaLet+Og5oakfXoeDHGYxuR0ugmRV3oMsC2pVVS99dV52a2uSjaIbKg6V+T6AZBi+itnpeZr6B+prxR/bBtIdx+YGSKO3Z9PyzwA6UhFgdlrUNip5OOuD1HWhz3+dh+m07osyiP0Qd1dnoLNFLaFByARWSPdZUl1oSKw1DWTujs2cbowRtx5b1tXx5fzlg9jZlLIk6rq3L80CDqNfRQHsTdGeAbpbf2sANHX5nGRS42Kf8e0Z5fSoUQNqlx+1xKTRd+m/oIPoddJB4AJqRcSCanrhED+vU2Ea2K23qbJSIci50Znb1dvVBA51DyveWY3RG3w2aPrMrOoCpUqcl0BTnA0tZvxvKekO3MVu5/H9CdFB8LTpz2rP13mJ5H4oGCI3lvVl5ve3yHoY67YsOGrZDg8yHm9rA19FU4bGtTf6RkgMB
nSnep/RFewyhPrUkxEKDy83Rgf4GKLj2f5RcHmjwWPlaVOrpu29Hg91fojO8z6JA2kfL+9ugAEAny2uL0oZuQn3exnTQd6Pp6V8s63vXsr73a3r/a8D5PWyPt6N92IKM7t9ubCyjDsq5rrTjrdH+7Ew04+jips9sBXy/w3KD0cnrNkN5czoa16AZWQuUNrABcFjTezOVttqrSxRuQ/eb/wYK0rxS2uXXyvu7oZMKvVr/jfU2AZplcCY6O7zpENvSHmi2wc/ROPRlNFO043EJCjYcVraRA5teX7qU3zaXAApWbFK2taPLb/1+0/s7opksVeu0DeXW2QNe7+lxQGk7G5X/T1F+298a7ajDsmrpu+uq04A2eQIaR74FPNr0meVK+T3LS9IvjxGvwLj+QFGpRxid6fs2dEZ+wdJBVsnEOj+6lnI5NMh7nnLtXXl/ZzrMu1DD75oPRau/WJ5vjgZHz5WNbjpK8rYe1KW2xEp1llW+01VmV5RH4abS+T1GScSEDtQWQUGaJ+giey4a+N9JZwk/lyjteXU04+BcPphtelfg2B62x9nQYPPZ8ltmRNMut0RnCU9D0eOeZb2lhizRZTs6u/yuW8s2thzauZ6OdqyzoGvJl65Q3mdQwGdCdPBwHiX3QyfrDZ0N/J/BM4rGn4sGMT3doQ6yvC9Dgc2Ne1mf8vd3RGeB1yjPJ2tqkyehg7jT+mAZVWqT5fsToEDWz2hKxIfOqv+VpoPQDurVVUKssg38oen5JOjAeDs0yK6UR2hAmd323Y3keV9DuT7WRwcb25f+aGGGkJW/LP+50EyPv6BreD9W8buzoIOp+cvzidCssavRfns3dHa3J7kNyjL6HeX646bXN2gso4ptenJ0FvEoSiC0vLcGupyjsf3t1O32hgKBH+/g87ujA9nGXXDuQuPANdElK9uiSx6He1lHaTcvlnZzJgpmrIdmZ72G9jWP0rRPGOb6NNbbmgPeWxVNx1+jw7Z0Jbr19/aobz0PBUga29uuVNu/fbKU8+VSv1fQpVvzlve3By6oUM5sKID9m9LOF0WBor+iy9a2RSeFlq5Q1kJoJs2H0ZjwcZoSs9Kj44Cy3iYq29rX+GDC8GXQyYMtOyiv6767rjoNaJNboFwZq6Hg/lfKulqjfLanAdZ+eox4BcbVB023VaGczUXJnr7Z9Pot6FqkwcqZrXQyV5eO52Mo2nkburbyF2ja6zI9/G2zlfo8jK6x3xndXuy2UpfKGXBrrlctiZXqLIsuM7uWDmsfNFPlSXQQcixKkrhS6cieYIgHaWgws2hzu6y4/m8qy+d6dNnG5MBU5f1p0UHu0j1c943ltEdph79Dg/y9ynK6Ck117nWb7CpLNNqR7YsCRz8p7en88lvvRdOgf4imubcra2o0bfd2NJ1znvL6ZOXfGaust1LOM4wOZCxdXm9kG/9sWd6Vb581XMu7LL950EFRT/skNCD6GzprfgKjB52TlzZwKgq09joBXR2Zy+cr/dAXUICuse7nLNteR2dP6TIhVqn/yWXbmKPp9VnRQcNOndSnfLfbvnsWlNvouqb2uByaJXBAedzRbntr8zc6uvUqH0ywuDLqw39TlvG96GBrjR63x33RbJXm9jhJWT6Vg1AoSPfPss2d37TMv4LOGs+M+r4hbW8MIXN6aQMPoWn4Z6Hp1kugKdRnoOunr6RHM45Q0rzfoLO5v0KzQn+BZmhdioIsK/WiLk112rKsr4F3Lmist0ptHI1LnkX7uItQEHuO0lcdgGYb3NZueyvl3I7OJp9d+pA1UEDl92jsdTOwZMXl/d3y/z1Rro3foEDpFeX3faZCObOjMffZ5XcchgLAL6MAxC/L9jvob6t5vX20tKOPZdP2UV4/jupJw2vru2us085Ny/ZAdJefb6HjmxfRLLKeBVj77eHkiMMkIn6BdhL7pq5FIiImzZLLoFxHOlNmfqlNOScCD2XmwRGxAeowVsjMV0PJ+95G1ydWui69Do06oevzfosioBugs9/vAO+V19fPismauqxPLYmV6i6rqcz9UOd4LXBvZj4dEZOgHccnM/PNFt+bE50V+HgqL8HjpW7/QDvAV9AZi5czc/eq9WnxtypfyxgRxwOPZeaBEbEvWk7vogj4Jeis9qyZuUM3daoqmpI+ouXzEApsnIeCIv9CA7Setcmmus2IdjwPozPxz6eSkc3RSX0i4lfo7MdZ6EzvsegM73Uo58GobHONdGZmROyCpmy+hM6A/gVF1XdA15TPPdh6G0M5L5RyrkOR/e0z87WIWD4zb233u+pW1/KuqS7Ho8DeCehMyPQokHE9apufQ2c1Fhub2mREzJCZL0fESoy+Nv0C1D+ugQav61esS63JDEvegZXRpVtHlNcOQNN6h5IQcUh9d/nu9mjG4cPogP0dFByZDu1TpkJJFr/Tab2GKsacYPEKdHC1NnBQL9tiU72+hy4j+GNm7lVe+yGakfn1imWcgKYnr4Pa5Oko2Lof+n33oZwsHbeDoSp1ui0zjyp9+KOZ+dPy3vKojb+fmU/3oC6j0EzKCdGB7/+hccQxKOD3KeCZzNxkuOsyhrrtiA7uz0QJoP+J1ts8mbl1xTJ+hU6uTICuR38djQPeQCcz5kfJXwdNsNgoJzMPiYg9UY6VR9BU9SnRSYp/ZuaDbcoZVX7DqMzcOyLuRfuCW9Bs0VnR5Q/vV/htR5U6/ajk8fkJ6kteQNP8XwDeblenukRJChoRnwX2R/v/X6Gx1lYoCNRRO+q2766zTmV5z4kCDpejGWP3oFkr06K+5Y7MvLqDnzjOcOBgGETEimhAdi4aOJyRmQc3vT8POtA+ITP/OUg5c6AzAVtl5pPltaPQQOqHoayfy2fmJcP3a8ZYp9+hDvEeNBC+DHXWv0Sd4erAg+066JrqM6bESkfwwcRKkwK7DnZgNQxldZ3ZtSSM2QPtsP6FzuJtURLGbIl2hHcAn2tXn7pE6zsz3IWuB3wvM78dHWSbrqle86Cgwdtoilsjs/u1aErZg2hQNOxtstSnqyzRZTvbFP2mh9FgYxM0KF4IzfB5C22DP6naB0TECmjQsRdaVseiHeSOmXlO1YRog5SzZ2b+X5W61Knb5T0M9ZkVDejOQVmXT0YzNSZmdHLdF1AfUOnAqIY6ddsmF0MJyl5EA9+fZ+bvI+IrKNv4jCiA+K3MfL5Cff6bEAsN7i/PzLNL//ZFtPxmBL7Uqk1GxMzogHdGdDD0T9R3fxH1Rxehfe2amfn3dnUqZXbVdzdtu7OhGQZfQ2fVF0ZB1ltK3VZBd1N6r0q9uhURgc7kH41mLT6H7oLyenn/apTYtGqCxW7qMqb+DdQm10BnHz+GLhV5apBy5kVT0adCAcxPoRl4Z6BLAe5H7evzJejTs/1StL4zwzmZWTlBZ011WQidKZ8QBfYfQkn6/ozawpvl9a0y8089qM+8aL29V/72H9HlEoeX+tyJZmZ8vl1QpbSlzdA47TV0Zv8i1M/Oic4ML4G2xWcGW/8VDvYXRv1b1ZMsi6Lx8rNoyv1qTX/nL2h/eX2bMiZE28W7mfnD8tp
h6MTRYsBfMvOYKvXpVlM7eh3tz05GgfDvovX2BtrHbJOZz7Ypq5a+u646NbXJ99Es1XvQ5WCHo23jKTSL5QeZ+YvBftu4zoGDYVACB2ulbkf4MXSN20xogP/HEl29OjPvrlDWEsDjjTMbJUq9ezmAPBddZ9WTTqP8/XXQAcPtaKB5MYoS/xBNudsGddSf78UOOiJuR4PZ21GnsSe6bu9bnUYD6yorqmV2fQFNGW05aIyI+xmdJXoydIZyErQTmRedNVyp6k6sLhExdTlDORWaRta4M8PUaCrd9pn5eA/rM6aszHtRzoCjAcVv0D23e9EmB6tPI0v0O4PVJ5TZ/Dp0oPkPdM3fc+is9cfQwGoPlIOjo9lGoQz2z6EDxvvQ1Ou1UVLDykHIFuV8DGUtv6iTOnWjjuU9THU6FAWw5kbLad5Sp/XQGZBzgO+MRW3yAjRd9v/QTJ7D0WBtm8y8v3ymcib+Nv3bY6jtvzVY/xYR16JB51wo4Pt6+Y0XoQz606OZAo9VrFPXfXfTtjsZGgDPhfISrVL+PykKsu+VmVdVqVfdImILlBvhFRRoXRgdxKzco78/pv7tLnRgvThaRs9UOPj4C2qTL6J1tAqasn0gGvzvha6f/vXw/JJB6zYKzSp9tmmm1ofQGOMPmXly4/Ue1OVStJxuQGdPd0fB50vQdrwg2vZXGO66lPo01tvzQKKcAo31Nh06EfVGZv6jQlmNtjQLSkT4LrpjxAKoLU2OzjzvmJk3VShvsIP9K1GffV2Hv3cWdCD7NrpkeSF0G84VK35/KdQvvolmP2yUmcuFbuO7N7BDL04ejaEd7YwOqHdjdED8/Sqzlurqu+uq04A2ORu6VOZltJ/eCO23pyvlf6NXJ+v6UvbB9RLj4oOmbKvousit0QD7fnSWoWo5MaCcqVGndiBwyQj8roEZcM9CHey1va4TNSRWGqayus7syuAJY34N3E0HyQx7tD46vjPDMLXJRlbmL9Fh0sdhrk+lLNFoJsm1Tc/nRmfTDkFTuc+hXKvX3D90UL9F0UDqVuCI8toCI1XOSC/vHtRpfTTgfAadZe35dlJDm5wOXZ+7/IDXd0Q5eFbusD51JMRaDLiu6fnSKFHr4QxItNhBvbrqu1tsuz9FB+hPMIS7IAxTexhygsUa/vZg/VsnuXbmRCdgGs+nRsGd/VCwZyKGeBeEYfjNXd+ZoYu/3ZhVNF3Ta1OU5XQfo2/J2jYJaU31GWy9/ZgO8jW1aEtbogPSSxg9A/amIS63I0q7HHI5TeXNUH7fPUNZ3ijIuxM6IF6uvLY+cPMIt6M9UTB57g7KqqXvrqtOLdrk59E+7wpgxV4v735+jHgFxqcHmk73Bl0eyKA8B+/TwyRGZcc3WAbcx3pdp/K3a0msVEdZZRnVkm2W1gljZi+vndXL5Vyhvh3fmaGGvzlYm/wIGhD/vE/qUzlLdNnxXU7TXUlK2Wugg/SNy2ttbwU1yN/4HF0GIOosZySXd4/rdBGaAt+T7aTOZcTooOUEA17fGfh2h/XqOiEWGjTeAHx1wO/9AtofrdZBferKyj2mbXcCdJnR0yjg27OD9Iq/vaMEizX8vXb926c7WGenAD8e8Nqi6DLRzcprHd0FoUfLoKd1Km3ulwNemxbNfPkuJXFrj+pSab112ZY2QLO8HqeLoAhdHuyPobyp0eVKXS/vUtZdwKf6oB0dTucnxurqu7uuU5U2ORLLu18fE2C9tAFwaWZe3GU5xwCHZA+nOaa8hQ6sdy/TuRrvXVfqdDOa6tMzmXkA2nFcEhGHltfeQdPfZu9lWWUZ/QedzVsDJXppJJS7A+2gPxYRE1co671UUqDbgRMjYtfy+jMor8Hjnfy24VSuwZsbOL6Gtl1ZmzZ5Dbq93LsR0ZN+rsI2chawQbv6pPKe7AIsEhGHR8TKpeyr0KUgi5XPDXmKe2b+Adg6M/9drvvNkSxniH+7luXdqzqhs4yXoYFRT/LS1LyMzkAH/E+VvAYN76JbVnVSr1b923MosDJPhTKeR4nUlo+InSNisfJ7z0BJHpfuoD619N0ttt33M/MsFJQ4KzO/n5l/qVq34ZYVkrPV/Pfa9W/LVywn0VnqeSPi3IhYt5TzABqHrFI+d0BmXjYsP6ZDJc/ESNTpl8BsEXFXRHyh1OFVNNtg8ezhpYVV11vFslq1pfNQkPNsYLvM/PMQ6/oymkm7XjflNJX3emY+UdPy/ifww+zhZYG0bkePoZM1ldTZd9dRp4ptciSWd19yjoMeKgdYU2RJRtRlWRP0eoff9Le7zoDb5d+vJbFS3WWV8rrK7FpXwpiR0KvrNVv87RFtk3XUp1xDOQ86i/IAyiHyYbQTnRBlrj8M2CQ7vMZyXNdv678f6zTENjkl6h/fR9fXXoiuG/41mj13J7queOvMvLlCHbru38p+dFqUIf+hiPg8Otgcha5tvhplPt8k2yQeG1DukPtub7vt1bWMSvBm2fKdR9G+exXUhiZBwbB90GU4ldf/uCYiJkPbWqLLkh5CAbGD0LI7B+Wk2jwzb+xBfWpbb97eeqfOdlRX311XndyXDI0DB9ZW1JgBt6b61JJYqc6yor7MrrUm+xpX9WGb7Lo+EXESo7Pb/xlNj34aHbisg64z/0tmnjuMP2Ws0G/rvx/rVFObPAUN6l5Ad/FYBjgtM08N3YXmHXSHgXsq1qnr/i0ifo0SD66GckecjGY9zIMyp8+KZvadWrFOXffd3nbbq2sZRcRv0fpfEOXXeBbdVvAOFOSaCLgze3B3iH5Wtt1J0MH1g+XfCzLzvIjYDB2oPZ2ZN/SoPrWtN29vvVNnO6qr766rTu5LhsaBA2urzgy4NdRlfuCUHJ3pdm40uFsVeDYzfz5CZXWd2TV0u7PjM3PV8nxpNNthPpS45Y9V6zOu66c2WUd9ImIR4NzMXDR0b/utUFbw54Dj+m12yUjrt/Xfj3WqoU3OgQZxi5fns6CD+03RoOyADuvTdf8WusvQWZm5WERMju4ssgpwDcpS/k6nM5+67bu97bZX1zIK3eLwwsxcpDz/FLACCvj80WeXJXR74kualtMyKOi3PHB9Zp7e4/rUtt68vfVOne2orr67rjq5L+lC9kGiBT/690GNGXBrqk8tiZXqLIv6MrvWljBmXH70YZvsuj4o2/rV6HrTxmuzo+mW99LD5FX9/ui39d+PdaqpTU6Akipu2fTaRGiAdi6wdod16rp/K33zJcDMTa/NB/wezRKYdQh16qrv9rZbaTnXsoxKu74S+EjTazOg69zvbS5/fH6gRNwXABs0vTYl8NmyDa7Y4/rUtt68vY2d7aiuvruuOrkvGfrDyRGtnaeBJyLix6DkLmijOhPd0/rzvaxM1pRYqc6yUolerkFn8prLPg5FwT/VQTl1JYwZl/VVm6yjPpl5F8qn8eWIWC0ipsjMZzJzdzQV80PDVvuxT7+t/36sUx1t8v3y+Z0j4uiImD8z/5OZtwJXoVwEldXRv5W++RZgi4iYq+QleCwzP19en38Ideqq7/a2215dyyiVZ+h0YLuI2DwiZsjMlzPzSOA8dO
[... base64-encoded PNG image data elided ...]\n", - "text/plain": [ - "
" - ] - }, - "metadata": { - "needs_background": "light" - }, - "output_type": "display_data" - }, - { - "data": { - "image/png": "iVBORw0KGgoAAAANSUhEUgAABA4AAAHRCAYAAADqjvsdAAAAOXRFWHRTb2Z0d2FyZQBNYXRwbG90bGliIHZlcnNpb24zLjMuMywgaHR0cHM6Ly9tYXRwbG90bGliLm9yZy/Il7ecAAAACXBIWXMAAAsTAAALEwEAmpwYAACnwUlEQVR4nOzdd5hkRdnG4d+7y5JzzlGiShAkg0gSBSSIJEFUEBGQjBJlySgCkpNkBCSJRMlIFImCiqgofoBIUkRAReX9/niqmbPjnOnTM70zs8tzX9dcM326u6a6+oSqt8KJzMTMzMzMzMzMrC+jhjsDZmZmZmZmZjZyOXBgZmZmZmZmZrUcODAzMzMzMzOzWg4cmJmZmZmZmVktBw7MzMzMzMzMrJYDB2ZmZmZmZmZWy4EDMzMbcSLilxGxxnDnY3yLiCMi4tWI+PMw5mGNiHh+uP7/QEXEohHxeET8PSJ2G+78DLWIOD8ijih/rxYRTw93nszMbOLlwIGZmQ2piHg2Itbute0LEXFv63FmfjAz72qTzvwRkRExyXjK6ngVEfMCewNLZObsw52fkaB8nx9o+PKvA3dm5jSZedIg/+/YiLh4MGkMVl/HRVOZeU9mLtqNtMzMzPriwIGZmVkfhiAgMS/wWma+3OkbJ9RgSZfNB/xyuDMxWP4uzcxsQuDAgZmZjTjVHtOIWD4iHo6INyLipYg4vrzs7vL79Yh4MyJWiohREXFQRPwxIl6OiAsjYrpKup8vz70WEQf3+j9jI+LKiLg4It4AvlD+9wMR8XpEvBgRp0TEpJX0MiJ2jojfliHzh0fEQhFxf8nv5dXXV963NnArMGfJ+/ll+6fLNI3XI+KuiFi8V5l8IyKeAN7qq8EZER+MiFsj4i+lrA4o2yeLiO9GxJ/Kz3cjYrKash+n17/XkPg1IuL5iPh6Kd8XI2LjiPhURPym/N8DKu8dW8rgwlI+v4yI5fr98hu8NyLuAD4OnFLKb5GIWD8iHivl/lxEjK2k1Rqdsl1E/F9oesiB5bn1gAOALUpaP6/Jz7MRsW9EPBERb0XEORExW0TcVPJ3W0TMUHl9J9/lpSiQdF3Jw9fL666IiD9HxN8i4u6I+GBN3t6bbhIRF/VOKyJuiIiv9XrPExGxSZPvwszMzIEDMzMb6U4ETszMaYGFgMvL9tXL7+kzc+rMfAD4Qvn5OLAgMDVwCkBELAGcBnwOmAOYDpir1//aCLgSmB74PvBfYE9gZmAlYC1g517v+QSwLLAiGj5/FrANMA/wIWCr3h8oM28DPgn8qeT9CxGxCHApsAcwC3AjavxVAw9bAeuXz/yfapoRMQ1wG/BjYE7gA8Dt5ekDS/6WBpYClgcO6p2vhmYHJkdl903g7PJ5lwVWAw6OiAUqr/80cBkq02sp30dDfb43M9cE7gF2LeX3G+At4PPltesDX42IjXultyqwKPoevxkRi2fmj4GjgB+UtJbqJz+fAdYBFgE2BG5CQYdZUJ1qN4ABfJdbAf8HbFjy8O3ympuAhYFZgUfRPtmvzNy2j7QuQN8RJX9Loe/vhnbpmZmZgQMHZmY2PK4pPbGvR8TrqEFf59/AByJi5sx8MzN/2s9rPwccn5m/z8w3gf2BLUvv/GbAdZl5b2a+gxq92ev9D2TmNZn5bmb+IzMfycyfZuZ/MvNZ4EzgY73e8+3MfCMzfwn8Aril/P+/oYbfMo1KBLYAbsjMWzPz38B3gCmAlSuvOSkzn8vMf/Tx/g2AP2fmcZn5z8z8e2Y+WCmXwzLz5cx8BTgU2LZhvnr7N3BkyeNlKKhyYvl/vwR+hYITLfdm5o2Z+V/gol7PtdP4vZl5V2Y+Wb67J1DDvfd3dWj5Xn8O/LzDvACcnJkvZeYLKHDxYGY+lpn/BH5Iz3c92O+y9ZnOLeX6L2AssFRURtB04FpgkYhYuDzeFgVK3hlAWmZm9j7kwIGZmQ2HjTNz+tYP/9uLX7U96uH9dUQ8FBEb9PPaOYE/Vh7/EZgEmK0891zricx8G3it1/ufqz4oQ+CvL8PF30A90zP3es9Llb//0cfjqfvJb23eM/Pdkp/qqIjner+pYh7gmSZpl7/nbJiv3l4rDXnQ54P+P3P1jhFvA5P3Nc2iRuP3RsQKEXFnRLwSEX8DduJ/v6ve6TX9blqafteD/S6JiNERcUxEPFP2vWfLU70/U1slsPEDYJuIGIVGO1zUaTpmZvb+5cCBmZmNaJn52zKUe1bgW8CVETEV/ztaAOBPaNG8lnmB/6AG3ovA3K0nImIKYKbe/67X49OBXwMLl6kSBwAx8E/Tr3HyHhGBggEv9JO/qufQ9Iy2aaNy+VPNa98Gpqw8nlDu+HAJ6lmfJzOnA86g+XfVX7kOxEC+y96Pt0ZTZ9ZG02rmbyXX4P/39XkuQCNP1gLeLlN7zMzMGnHgwMzMRrSI2CYiZim9tq+Xze8Cr5Tf1cbypcCeEbFARExNz9z1/6C1CzaMiJXLXPOxtG+ETQO8AbwZEYsBX+3Sx+rL5cD6EbFWRIxBt2r8F3B/w/dfD8wREXuEFkOcJiJWKM9dChwUEbNExMxomkbd7QcfB7YuPd7r8b/D/UeqaYC/ZOY/I2J51PBu6iVg/tIb3w0D+S5fYtx9eZryntdQIOeoDv5/77QogYJ3gePwaAMzM+uQAwdmZjbSrQf8MiLeRAslblnmqb8NHAncV9ZKWBE4FzWK7gb+APwT+BpAmX//NTQv/0XgTeBl1Dirsw9qgP4dLQL4g+5/PMnMp9ECdicDr6LF9zZsOg89M/+OFu7bEA3J/y1aJBLgCOBh4AngSbTQ3hE1Se1e0ngd9VBf0/GHGR47A4dFxN9RYOTyNq+vuqL8fi0iHh1sRgb4XR6NgjuvR8Q+wIVousMLaN2I/tb2aJdWy4XAh6kPGpmZmfUpMrs9Os/MzGzkKyMSXkfTEP4wzNkxG+8i4vPAjpm56nDnxczMJiwecWBmZu8bEbFhRExZ1kj4Dup9f3Z4c2U2/kXElGhUxlnDnRczM5vwOHBgZmbvJxuhhev+BCyMpj146J1N1CLiE2hNkJfQIpJmZmYd8VQFMzMzMzMzM6vlEQdmZmZmZmZmVsuBAzMzMzMzMzOrNclQ/rOZZ545559//qH8l2ZmZmZmZmbWxiOPPPJqZs7S13NDGjiYf/75efjhh4fyX5qZmZmZmZlZGxHxx7rnPFXBzMzMzMzMzGo5cGBmZmZmZmZmtRw4MDMzMzMzM7NaDhyYmZmZmZmZWS0HDszMzMzMzMyslgMHZmZmZmZmZlbLgQMzMzMzMzMzq+XAgZmZmZmZmZnVcuDAzMzMzMzMzGo5cGBmZmZmZmZmtRw4MDMzMzMzM7NaDhyYmZmZmZmZWS0HDszMzMzMzMyslgMHZmZmZmZmZlZrkuHOwPvK2OkG+L6/dTcfZmZmZmZm
Zg15xIGZmZmZmZmZ1XLgwMzMzMzMzMxqOXBgZmZmZmZmZrUcODAzMzMzMzOzWg4cmJmZmZmZmVktBw7MzMzMzMzMrJYDB2ZmZmZmZmZWy4EDMzMzMzMzM6vlwIGZmZmZmZmZ1XLgwMzMzMzMzMxqOXBgZmZmZmZmZrUcODAzMzMzMzOzWg4cmJmZmZmZmVktBw7MzMzMzMzMrJYDB2ZmZmZmZmZWy4EDMzMzMzMzM6vlwIGZmZmZmZmZ1XLgwMzMzMzMzMxqOXBgZmZmZmZmZrUcODAzMzMzMzOzWg4cmJmZmZmZmVktBw7MzMzMzMzMrJYDB2ZmZmZmZmZWy4EDMzMzMzMzM6vlwIGZmZmZmZmZ1XLgwMzMzMzMzMxqOXBgZmZmZmZmZrUcODAzMzMzMzOzWg4cmJmZmZmZmVmtxoGDiBgdEY9FxPXl8QIR8WBE/C4ifhARk46/bJqZmZmZmZnZcOhkxMHuwFOVx98CTsjMDwB/BbbvZsbMzMzMzMzMbPg1ChxExNzA+sD3yuMA1gSuLC+5ANh4POTPzMzMzMzMzIZR0xEH3wW+DrxbHs8EvJ6Z/ymPnwfm6m7WzMzMzMzMzGy4tQ0cRMQGwMuZ+chA/kFE7BgRD0fEw6+88spAkjAzMzMzMzOzYdJkxMEqwKcj4lngMjRF4URg+oiYpLxmbuCFvt6cmWdl5nKZudwss8zShSybmZmZmZmZ2VBpGzjIzP0zc+7MnB/YErgjMz8H3AlsVl62HfCj8ZZLMzMzMzMzMxsWndxVobdvAHtFxO/QmgfndCdLZmZmZmZmZjZSTNL+JT0y8y7grvL374Hlu58lMzMzMzMzMxspBjPiwMzMzMzMzMwmcg4cmJmZmZmZmVktBw7MzMzMzMzMrJYDB2ZmZmZmZmZWy4EDMzMzMzMzM6vlwIGZmZmZmZmZ1XLgwMzMzMzMzMxqOXBgZmZmZmZmZrUcODAzMzMzMzOzWg4cmJmZmZmZmVktBw7MzMzMzMzMrJYDB2ZmZmZmZmZWy4EDMzMzMzMzM6vlwIGZmZmZmZmZ1XLgwMzMzMzMzMxqOXBgZmZmZmZmZrUcODAzMzMzMzOzWg4cmJmZmZmZmVktBw7MzMzMzMzMrJYDB2ZmZmZmZmZWy4EDMzMzMzMzM6vlwIGZmZmZmZmZ1XLgwMzMzMzMzMxqOXBgZmZmZmZmZrUcODAzMzMzMzOzWg4cmJmZmZmZmVktBw7MzMzMzMzMrJYDB2ZmZmZmZmZWy4EDMzMzMzMzM6vlwIGZmZmZmZmZ1XLgwMzMzMzMzMxqOXBgZmZmZmZmZrUcODAzMzMzMzOzWg4cmJmZmZmZmVktBw7MzMzMzMzMrJYDB2ZmZmZmZmZWy4EDMzMzMzMzM6vlwIGZmZmZmZmZ1XLgwMzMzMzMzMxqOXBgZmZmZmZmZrUcODAzMzMzMzOzWg4cmJmZmZmZmVktBw7MzMzMzMzMrJYDB2ZmZmZmZmZWy4EDMzMzMzMzM6vlwIGZmZmZmZmZ1XLgwMzMzMzMzMxqOXBgZmZmZmZmZrUcODAzMzMzMzOzWg4cmJmZmZmZmVktBw7MzMzMzMzMrJYDB2ZmZmZmZmZWy4EDMzMzMzMzM6vlwIGZmZmZmZmZ1XLgwMzMzMzMzMxqOXBgZmZmZmZmZrUcODAzMzMzMzOzWg4cmJmZmZmZmVktBw7MzMzMzMzMrJYDB2ZmZmZmZmZWq23gICImj4ifRcTPI+KXEXFo2b5ARDwYEb+LiB9ExKTjP7tmZmZmZmZmNpSajDj4F7BmZi4FLA2sFxErAt8CTsjMDwB/BbYfb7k0MzMzMzMzs2HRNnCQ8mZ5OKb8JLAmcGXZfgGw8fjIoJmZmZmZmZkNn0ZrHETE6Ih4HHgZuBV4Bng9M/9TXvI8MNd4yaGZmZmZmZmZDZtGgYPM/G9mLg3MDSwPLNb0H0TEjhHxcEQ8/Morrwwsl2ZmZmZmZmY2LDq6q0Jmvg7cCawETB8Rk5Sn5gZeqHnPWZm5XGYuN8ssswwmr2ZmZmZmZmY2xJrcVWGWiJi+/D0FsA7wFAogbFZeth3wo/GURzMzMzMzMzMbJpO0fwlzABdExGgUaLg8M6+PiF8Bl0XEEcBjwDnjMZ9mZmZmZmZmNgzaBg4y8wlgmT62/x6td2BmZmZmZmZmE6mO1jgwMzMzMzMzs/cXBw7MzMzMzMzMrJYDB2ZmZmZmZmZWy4EDMzMzMzMzM6vlwIGZmZmZmZmZ1XLgwMzMzMzMzMxqOXBgZmZmZmZmZrUcODAzMzMzMzOzWg4cmJmZmZmZmVktBw7MzMzMzMzMrJYDB2ZmZmZmZmZWy4EDMzMzMzMzM6vlwIGZmZmZmZmZ1XLgwMzMzMzMzMxqOXBgZmZmZmZmZrUcODAzMzMzMzOzWg4cmJmZmZmZmVktBw7MzMzMzMzMrJYDB2ZmZmZmZmZWy4EDMzMzMzMzM6vlwIGZmZmZmZmZ1XLgwMzMzMzMzMxqOXBgZmZmZmZmZrUcODAzMzMzMzOzWg4cmJmZmZmZmVktBw7MzMzMzMzMrJYDB2ZmZmZmZmZWy4EDMzMzMzMzM6vlwIGZmZmZmZmZ1XLgwMzMzMzMzMxqOXBgZmZmZmZmZrUcODAzMzMzMzOzWg4cmJmZmZmZmVktBw7MzMzMzMzMrJYDB2ZmZmZmZmZWy4EDMzMzMzMzM6vlwIGZmZmZmZmZ1XLgwMzMzMzMzMxqOXBgZmZmZmZmZrUcODAzMzMzMzOzWg4cmJmZmZmZmVktBw7MzMzMzMzMrJYDB2ZmZmZmZmZWy4EDMzMzMzMzM6vlwIGZmZmZmZmZ1XLgwMzMzMzMzMxqOXBgZmZmZmZmZrUcODAzMzMzMzOzWg4cmJmZmZmZmVktBw7MzMzMzMzMrJYDB2ZmZmZmZmZWy4EDMzMzMzMzM6vlwIGZmZmZmZmZ1XLgwMzMzMzMzMxqOXBgZmZmZmZmZrUcODAzMzMzMzOzWg4cmJmZmZmZmVktBw7MzMzMzMzMrJYDB2ZmZmZmZmZWy4EDMzMzMzMzM6vVNnAQEfNExJ0R8auI+GVE7F62zxgRt0bEb8vvGcZ/ds3MzMzMzMxsKDUZcfAfYO/MXAJYEdglIpYA9gNuz8yFgdvLYzMzMzMzMzObiLQNHGTmi5n5aPn778BTwFzARsAF5WUXABuPpzyamZmZmZmZ2TDpaI2DiJgfWAZ4EJgtM18sT/0ZmK27WTMzMzMzMzOz4TZJ0xdGxNTAVcAemflGRLz3XGZmRGTN+3YEdgSYd955B5db66qnFlt8wO9d/NdPdTEnZmZmZmZmNlI1GnEQEWNQ0OD7mXl12fxSRMxRnp8DeLmv92bmWZm5XGYuN8sss3Qjz2ZmZmZmZmY2RJrcVSGAc4CnMvP4ylPXAtuVv7cDftT97JmZmZmZmZnZcGoyVWEVYFvgyYh4vGw7ADgGuDwitgf+CGw+XnJoZmZmZmZmZsOmbeAgM+8FoubptbqbHTMzMzMzMzMbSTq6q4KZmZmZmZmZvb84cGBmZmZmZmZmtRrfjtFGjg9f8OEBv/f
J7Z7sYk7MzMzMzMxsYucRB2ZmZmZmZmZWy4EDMzMzMzMzM6vlwIGZmZmZmZmZ1XLgwMzMzMzMzMxqOXBgZmZmZmZmZrUcODAzMzMzMzOzWg4cmJmZmZmZmVktBw7MzMzMzMzMrJYDB2ZmZmZmZmZWy4EDMzMzMzMzM6vlwIGZmZmZmZmZ1XLgwMzMzMzMzMxqOXBgZmZmZmZmZrUcODAzMzMzMzOzWg4cmJmZmZmZmVktBw7MzMzMzMzMrJYDB2ZmZmZmZmZWy4EDMzMzMzMzM6vlwIGZmZmZmZmZ1XLgwMzMzMzMzMxqTTLcGTAbH57f754Bv3fuY1brYk7MzMzMzMwmbB5xYGZmZmZmZma1HDgwMzMzMzMzs1oOHJiZmZmZmZlZLQcOzMzMzMzMzKyWAwdmZmZmZmZmVsuBAzMzMzMzMzOr5dsx2ohy3BYbDPi9e//g+i7mxMzMzMzMzMAjDszMzMzMzMysHw4cmJmZmZmZmVktBw7MzMzMzMzMrJYDB2ZmZmZmZmZWy4EDMzMzMzMzM6vlwIGZmZmZmZmZ1XLgwMzMzMzMzMxqOXBgZmZmZmZmZrUcODAzMzMzMzOzWg4cmJmZmZmZmVktBw7MzMzMzMzMrJYDB2ZmZmZmZmZWy4EDMzMzMzMzM6vlwIGZmZmZmZmZ1XLgwMzMzMzMzMxqOXBgZmZmZmZmZrUcODAzMzMzMzOzWg4cmJmZmZmZmVktBw7MzMzMzMzMrJYDB2ZmZmZmZmZWy4EDMzMzMzMzM6vlwIGZmZmZmZmZ1XLgwMzMzMzMzMxqOXBgZmZmZmZmZrUcODAzMzMzMzOzWg4cmJmZmZmZmVmtSYY7AzZxOHWnOwb0vl3OWLPLOTEzMzMzM7Nu8ogDMzMzMzMzM6vlwIGZmZmZmZmZ1WobOIiIcyPi5Yj4RWXbjBFxa0T8tvyeYfxm08zMzMzMzMyGQ5MRB+cD6/Xath9we2YuDNxeHpuZmZmZmZnZRKZt4CAz7wb+0mvzRsAF5e8LgI27my0zMzMzMzMzGwkGusbBbJn5Yvn7z8BsXcqPmZmZmZmZmY0gg14cMTMTyLrnI2LHiHg4Ih5+5ZVXBvvvzMzMzMzMzGwIDTRw8FJEzAFQfr9c98LMPCszl8vM5WaZZZYB/jszMzMzMzMzGw4DDRxcC2xX/t4O+FF3smNmZmZmZmZmI0mT2zFeCjwALBoRz0fE9sAxwDoR8Vtg7fLYzMzMzMzMzCYyk7R7QWZuVfPUWl3Oi5mZmZmZmZmNMINeHNHMzMzMzMzMJl4OHJiZmZmZmZlZrbZTFd7v5t/vhgG/99lj1u9iTszMzMzMzMyGnkccmJmZmZmZmVktBw7MzMzMzMzMrJYDB2ZmZmZmZmZWy4EDMzMzMzMzM6vlwIGZmZmZmZmZ1XLgwMzMzMzMzMxq+XaMZm2MHTt2SN9nZmZmZmY2knjEgZmZmZmZmZnVcuDAzMzMzMzMzGo5cGBmZmZmZmZmtRw4MDMzMzMzM7NaDhyYmZmZmZmZWS0HDszMzMzMzMyslm/HaGbWj1N3umPA793ljDW7mJOR7fY7FhrQ+9Za85ku58TMzMzMus0jDszMzMzMzMyslgMHZmZmZmZmZlbLgQMzMzMzMzMzq+XAgZmZmZmZmZnVcuDAzMzMzMzMzGo5cGBmZmZmZmZmtXw7RjOzCczz+90z4PfOfcxq7/09duzYAaczmPeamZmZ2YTFIw7MzMzMzMzMrJYDB2ZmZmZmZmZWy4EDMzMzMzMzM6vlwIGZmZmZmZmZ1XLgwMzMzMzMzMxqOXBgZmZmZmZmZrV8O0azIXL7HQsN+L1rrflMF3NiZhOypxZbfMDvXfzXT3UxJ2ZmZvZ+4REHZmZmZmZmZlbLgQMzMzMzMzMzq+XAgZmZmZmZmZnVcuDAzMzMzMzMzGo5cGBmZmZmZmZmtRw4MDMzMzMzM7Navh2j2fvY/PvdMKD3PXvM+l3OSY8PX/DhAb3vye2eHOexb1k3YZr9zscH/N4/f3zpruVjfBjovg3/u393y6k73TGg9+1yxppdzomZmZmNZB5xYGZmZmZmZma1HDgwMzMzMzMzs1oOHJiZmZmZmZlZLQcOzMzMzMzMzKyWAwdmZmZmZmZmVsuBAzMzMzMzMzOr5dsxmk1gJubb1ZnZhOm4LTYY8Hv3/sH1XcyJmZmZjQ8ecWBmZmZmZmZmtRw4MDMzMzMzM7NaDhyYmZmZmZmZWS0HDszMzMzMzMyslgMHZmZmZmZmZlbLgQMzMzMzMzMzq+XbMZqZmdlEZ+zYsV153+13LDTgPKy15jMDfm9/5t/vhgG979lj1u9yTszM7P3CIw7MzMzMzMzMrJYDB2ZmZmZmZmZWy4EDMzMzMzMzM6vlwIGZmZmZmZmZ1XLgwMzMzMzMzMxq+a4KZjZ4Y6cbxHv/1r18mBUDXXUe+lh5fqD7t/ftjj2/3z0Dfu/cx6zWxZy8T4zAc/dTiy0+oPct/uunxnl86k53DDgPu5yx5oDfOxQGeseQwb73/erDF3x4QO97crsnu5wTGw6z3/n4gN/7548v3bV8jAQecWBmZmZmZmZmtRw4MDMzMzMzM7NagwocRMR6EfF0RPwuIvbrVqbMzMzMzMzMbGQYcOAgIkYDpwKfBJYAtoqIJbqVMTMzMzMzMzMbfoMZcbA88LvM/H1mvgNcBmzUnWyZmZmZmZmZ2UgwmMDBXMBzlcfPl21mZmZmZmZmNpGIzBzYGyM2A9bLzB3K422BFTJz116v2xHYsTxcFHh64NkdkWYGXp0I0+lmWiMtnW6mNdLS6WZaE3Oe/NmGNq2Rlk430xpp6XQzrZGWTjfTGmnpdDOtkZZON9Maael0M62Rlk430xpp6XQzrZGWTjfTGmnpdDOtkZbOSDJfZs7S1xOTDCLRF4B5Ko/nLtvGkZlnAWcN4v+MaBHxcGYuN7GlMxLz5M/mPI20dEZinvzZJsw8+bNNmHnyZ5sw8+TPNmHmyZ9twsyTP9vEYzBTFR4CFo6IBSJiUmBL4NruZMvMzMzMzMzMRoIBjzjIzP9ExK7AzcBo4NzM/GXXcmZmZmZmZmZmw24wUxXIzBuBG7uUlwlVt6ZhjLR0upnWSEunm2mNtHS6mdbEnCd/tqFNa6Sl0820Rlo63UxrpKXTzbRGWjrdTGukpdPNtEZaOt1Ma6Sl0820Rlo63UxrpKXTzbRGWjrdTGukpTNBGPDiiGZmZmZmZmY28RvMGgdmZmZmZmZmNpFz4MDMzMzMzMzMajlwMEgREeM5fX9HQ2B8f4+D0a28dTGdEbdPjsTvbyTmqVtG2j45MXNZm9lI4fOIjQ/eryYcI64BMKHJ8bBIRETMHRGfiIgZMvPdsq3jg2p8NfBGYsOxCyaNiNERMcdgEomIeSJinYiYotf2gXx/k0D39rHMzCgGkJeu7JPlfZOU35MPJp3y3hlg/ByHgzXI8h5T+XtQF9Te5T3ItLpS3t3at1vnooiYarBpjCQRMXUXy3qybqRT0upGeY+4CmJETF35e8pu5rG1r48EI2lfH4n7Qbd1u7xHQpm1zi
Pl0jZi9qdui4hRI6G8W3ko5T16uPMzvoyEOly1rMvviba8B2OiPejHp4jYJCL2iogrImLzyvZBn2giYkXgKmBb4OJWA6J6su4guRMjYtPB5KdX3uYqeXl3sJ+1VM6mi4jpu5W/QToOuBjYu1VhH6DvAwtk5j+g5/sqjchOj7eDImKriJh9EPkhIpaLiF0j4uNZdPj+bu6TAIdExKXAHhERg7xg/DAivjiI93dNRHw1Ij4aEWtGxNERsf5Ayrv4fjm/TNGFC+o45T3ItLpV3l3Zt4H9I+Iy4MCIWGmAaXT1PDkYETFFRJyAVml+OiKOjYhpy3MD/e6+HRH7RcRClf8z0LS6Ud7fiIjlqxuGq4JeLe9yvK0DHAYs3rvS2GkeI2Jb0K2ry+MxA0xnwYhYJCIW7OR9vdLo2rW7W8o1cbaoBEm7oQt1sNkiYoYowbHBNI5LeXdtxNBwNq5KmXyh/KxSLm2tToRhbWB1uz4ZEaMy892R0JgFpomIDUp5/xfG6QwYrvPm6C7u15+NiG9ExIa9tg/HZ5um/F4fYKSU90jjwEGHImI64LvAa+hWlN+MiJsiYsUunWgOBc7IzG2AZ4EvRcR1EbEVNI/KRcTKwCrAHeXx1BExVVR6VjoRERsAt0fE1yNilsF81ohYF7gINdR3iA57QiNirohYPyK+FhGLDvZiUcr2w8AJwBzA5hGxXkRs3qq0N0xnTWCSzDyrPN4OODQiTomIOVoX2YZpzQPsCayK9oFPle3zd3LSjojPAKcCcwLnRMSBZXsnJ8Cu7JPl/24FfBy4EPgUsFFpbK8YvUZpNEjrM8CozDyvPF4qIlaLiJU73acGKyLmBXYClgKOBN4EzoyI0zqtqEfEssASwDvA9pXtAxm5ML7Le/VOy7uL+/YWwLrAucC/0bl45g4/U1fPk11wODApcA3wT2B31EhfA+h4346IWYENgMWAPSPiK+WpD0bEmE4q/F0q7+WBTYDHqtuHsYLeKu/tgN8ANwGfAV6pVBoX6DSPEbE+cEFEPBYlKJWZ/x5AOjsCZwCnA1/s5HpUSaNr1+5uKA3QLUKdLpcDm3Up3b0iYqpBBLSJiO2BM4G70PmcTq7bJY1WoP6SiJhzMPnpla+TI2KHXtuHsiFzDvAx4FvAbRFxVkSsFBGTto6VgSjXkY9FxAoDfP+g6pN9pLcXcE1EHDCYdAar1HPPBX4AXBsR15bPCvAudH7e7EJZLx4Rs2Xmf0vgb1ABo4jYFfgaMAm6thARc0PPiM3BpN9BPlplfUREnEGlvENBpP+08tRhuoMq75HKgYPObQw8lpkXZOZ5mfkh4BbgRxFxaAwiOh0aJv9P4Idl0wbAzMAVwFERsW8HyW0InJOZr5eKwynAT4G9ImL+AWRvShQsmR04LyK2Dg1fX6/kvZMTyLHo4nwqsDrw9ZJG05PEFcDSwDLoYnFkaQAM9EK6BnBsZv4M+B3wOWArYGVgxw7S+QeqfBIRuwPbAL9GDcCzOqn0ZeZzqMI4EzAaWCsivgWcBozp4AR2ELBfZh6A9onFI2KyDgJQ3dwnATYCjsjMm4D7gF1QRWQr1LDtxKzApSWfu6EG+yHA5sAnO0xrsI4FTgK+iipY/wV+CywPLNThBefo8nMs8PmI2AYG3Ljqq7y/zTCWdxf37T2AwzLzlsw8BHgJWKv1ZDTroe32eXLAQkHpj6CG+UxoHzi75PFatD90WtYvo9FUCTwALBwRF6F9dEyHFf49GHx5Hw8ck5n/joi1I+KoiHgqIvYYzLVzIKrlXRr1Y4HngT8Dd0bECaUSeV5EXBqdjY55El3frgCOjYjbI+LLEXFc+d9te9nL9WIv4EuoEbso8K0BlFM3r93dcC6wGmowLAB8ppTNAlEZxdLJ54yIVYDvoHrY1tD5KL9S3vuioOaXgU9HxBFN319xBqoL/BPYKSJmjYjRAw0glHPt1sDDwMdCo2QWhaFrXIVGHc6ZmV9Egb/XgZWAy4ALQx0525fXdhKM/CpwIvAV4HMRscQAsjfY+mQ1P7sDawMnAyuXBuVHKs8P5TnqOOBv6Hr0W+AJNDruUtS5tV/JU6OpUF0q65uAB1tBlUpwteNyCY3o2QX4YmYeCSwaEeeg/emyiJh9CAOcrbJ+Epge1XdfRXWcsyNiy2Eq75EpM/3TwQ9qNJ0LrNpr+/zAecCsg0h7NKq4XotOyA9UnlsKDV+dtGFamwLXATMC96BeteVKGhc3TadXmqeiSswmqOL5HHBCh2l8Bbiu8vgDwNXANOXxIsAs/bz/E8CtlcdbA/ejiPOqneSlksbOqCd9X3Sxn7NsX6t8Fws0TGfqUrY7AUcBH6g8dx6wQof5mhM4EA2fWgn4PWr8bQvM0+D9y6ORAqAg4WSoJ3Ozsm0LYOWh2ifLe/YFbgN2QL3ycwJjyn51QyfHD6p8Pg6siYJ305fP+KWS39r9qJs/wDyoB+AKNBrpamA3FHT6P+D28rrPAZ9qk9ZawFWVxxujBt+Wre+jw7x9YySWdxf27amBvYGFWmUCfBG4uvy9PXBwg3S6fp4c5L60N7AfCjr+Bvh5Keuvl8/XqKyBAKL8PSOqXM+JGo+/QpXRI4AlGuZratSgGnB5o0biS8D5wMJolMdnUSDkDlT5j2Eq78XLfvgMsGU5fn8HrICO75OBHTpM+wvAV8rfh6JRGvd28P49gYsrj6cFrgdmK48/BszfMK1BX7u7VN4fLPveKHSuPBxdkx4s+8Afy/6+cIfpzgh8DwVFf4bObXsAZ7eOhwZpHACcV3k8Xzk3TFseLwXM1CaNA4EfVPb336Br/5PA1wZQXlOW936wPL4JBW9/BdwNzDdE39t66Lr2QWBX1Mg6HAU3Xwb+DpzZYZrTAE+j89IsKPBzCRoB1Og8wCDrk73SmgJ4BFi2PP4RmqL5e3QN7WifHGR5LwL8tPL4z2U/uBgFEf4B/HCIy3ob4Meo0+7qchx/tvL8ou2Oj17pzQRcAMwAfAjVlVZF59tTgZ2GqayfAJ5C9a7bUHvvraEu75H84xEHHSgRzL+iytzYEoWapPTgPgssiCKeA5KK3u2Nol3HAj+pPL06MHNmvtMwratRb/dXgWcz877MfDgzd0QH7CxN81WJ3F6AGsM/RJXsAOaLiDOi+fCwh1AEb1R5zzPohL1yef7k8rjOX4E3I2Lp8vgF4BfoAD82ImZsmI/3ZOZpqKEf6KI8ddl+OzAXajzXip6F3t4s+f8i6rFYvzw/KRod8VaHWfsLmjqxJaoYvY0qR8ujHu12ngbODy2Olpn5L3TC36zk6RA0GqJWN/fJ4iL0Xc0H3Fz+x78z81zUo914+Hxm3oPKe+Oyae7M/FdJa5ZO0hqMVA/6bGi43W7AvJl5UmrKyt7AEqX3+gDglTbJ/R8ank5ETJKZ16BG7Iah6S6dDgk9FVXI50UV8sGW9yn0lPc8gyjvVg/oFgxg387MNzPzOOCFSpn8CBhdeuN2RlPJ2qXTtfNkl7QqZpsAb6DzHWjEwKc6KOtRWWowmfkXdC7ZDgV//oMCSv9BFf62yrntu8AfB1remfkHV
EH8I5qq8K/MvCI1GmZjFIga6ikirfI+AjU0bwYORgGlf2Tmg+X4vh5Yv8lIgYrL0YgGUHD7AeDliHg1IqZs8P5LgFND03cmz8w3UPBv2YiYBjWS/9ZfAl2+dnfDv1AAZR3U8FgMBUSmQefQtVAdoaORbGUfvxR4LjOXR9eYo4HFIqLpKKZr0GgBSp3uj2X7vKVecXrJe59KPeBfKBAFCqb9LNVLvwsaffDBTj4XOu7vAaYOTYVcAjgkM5cAHqUylW08uxUFK76FpvIcigKI+6NAzd/RueDyUt9oYktUPn/KzFdQcHQqdB3PiNgoIj7UJo3B1ier/lk+5wcjYkt0Xtg+MxdEoz32H8IRB38D3omIfUPTS/+KruN7o+D9S8DbEfFQw+N3C+ChQZb1fcBxmfk4Gvn2XWD3iLi6jBQ6m87O339D57OngW8CN2fmveV8+2PgU6WeOr71LuvRwIooUHM3Olf+ks7Le7D79ogVzc6n1ltEbATsjyr6P0UV89Uyc9kBpDULamiOAu7OzLvLAXMcOkmfhyoym2Tmr9uktTXw58y8IyI+Xt63GqrU3YLm8m+SmWt2kL/WyXIMqoBchy7sx6AT94dLA6ddOpNl5r/KsL3/VrZ/HTW6xqBGV+3FsFyUvol6OmdFwZpDMvP2iDgVuKw0cJp8rlnRxeHRzHy1bNsWzbX6Q0l/iszcrk065wO3A9/PnsWQ9kXTBH6EGkJvZ+bODfI0HRq98sfU8OlAFcSNgV0z8+aImK9SqalLZ87M/FMf26dEJ/i3gTczc8+a93dtn+wj7dGZ+d/QfOsFUAR9GdRL0G9Zl/d/DPhVZr4SmkqxAbqQ/Q318EwLzJCZn+skXwMVmg8/OepFnQL4d2beU46bDVHD/07g+czcp590Zk0NLe+9fVIUsV4f+Ey5cLfL0xzAksDjmflS2fZl1GP8O7TfNy3vJVGj+o2yX2yEeorfQN9do/IuDcyPogrCK6Wi/S3g03S2by+AeofvyMw/l22tfeow1NN7UWYe2E8aXT9PDkZoKPAklP0HBURfRFNKNkejIO5B15smZf19FCzYpwQMiYhDgc8DO2fmTaH57u0CWa0AVmuBv1HlHNf63ai8y3unzMy3y98fBt7JzKfL412BdTJzo3b56YZKeb+TmT8r+/XfMvOdiFgN9VhPhs51N6Kevl9k5lFt0v0Y6j3/b2o6xpfRuXs5YI3MfKrhPj5Z63vrtX0X1Ks6A/CHzDy0TTpduXZ3U2ie/qrofDkv6t37NQpk3YkCLFcDn8iyyHCb9KJUxidD08W+iQI2+6NGyRrA0pnZOHBfSfNoNCx/YXT+Htvm9ZOWfWhy1Hi9K3sWEDyrPL6kwzzsixrZP0f1ka3K8x9Bwfz1M/OfTT/bQJSA2aTlZ1l0TpoJXW/3RqNjLkMjYB5pmOZUqLf3F6gt8k5EnI56fC9C+8J6mflazfsHXZ/sI81t0DH/x/JZNi7b50QjCtdusk92Q0SsjYKYt6M65KSoN34TFARYNyIWz8ynGqQ1GfreHmJgZT1HZr7Yx/aZUXmdjuq/2zb8bHNn5vPl74XQiM1jgVMy865Sp346M49ukt5glTrAWHrK+ujyeE10Llk5Mx/voLynRHWIR0GdNZ2U94iXI2DYw4T8gypNe6BhyR8dYBqXofm9p6JhhDNThvygecg7Ass1SGdu4E/o4Gttmx31gB+EIvInAws2zNdkfWxbDA2Xu7LDz/gR4Pia5+ZBF8VHKcMC26Q1Bxo29WnKUHvUeHkSWKRhflYA7kULz7wIfKFsnxpNfzgLVUCmbpPOiqj3bOpK3uZCQwxHoUbtvDQY8owqGg+gRvmLKGAwI6o0nNRBWX8YnYj3pQzZ6/X8dWU/mW5875Pl9Qujhtk8vbbPW/bL88r/+p+89pHWSqWMRvfaPjsK+IxFja0pO9k/B/KDggVj0Z00XkFDyO9BvWetqSG/RL0Yr7Up7/nKcTVVP685CJi9Qb5WR0MuL0JBgm+jwMw8JY0LOyjvxcq+dABqZLS2L416DQ9pUt5l33647FdPoZEqDGDfXgkFaq9AgaI9y/ZJyu+tUANk+n7S6Op5sgv70e6op/vMsh89h85lrSkBH0cByEMblvVy5Xu/kMrUGLQI3UEd5m0pdA5Yt9f2Vnlv2aC85yxpHFmOkfV6PT8baoAsNsTlfTAainxPKftry/6+TPn+dy/H7/HABQ3S3QpdCybptf0gFNym93M16ayOevOuRefZdVvfOToPP4N6s/pLo2vX7i6W+2Qo6NU6f2yKGkGXoqmCL6Ch/lcAhzZMc9Hyu3WsrId6Zl+lTMNBvXz9pfGR8t19t+RrdOW5RVCw7oE2acyAAv5r1zy/AKrf1B4nfbxnk0p5zYnqJue0jh903TlwPH9nu5bv5HYUEFsX9ZwegUYhvoQ6XjpNdw50/W/Vm1rf34bounUJ8M0231lX6pPlPStW/p6x/D4and8WQPXEtlPfulDeUY7xjdE56FZ0nb297J9XlTL/OBpV1iTNr1Cpf7behzoimpT1KqgO8MV+9u0Xmu7bqE78axTYm72yfXN0Xb4T+PEQlvWsqENjHxQwvh+d81dE9bbvV8utQbofpzIdoVLejfbtCeFn2DMwIfygi8daaH7Xh7qc9hqMO2/8EdSwa62k3G/DtVda56MK2uWo12y2ynOTld+Td5De0ajXef7KtsnLAT5Hedz0YLqensb5KHrmaY5BFYpb25y8VkAX5z7/Xzmpnt3BZ7u9kp+1q++lg3nkqEG1T/l7Y9RTcl/JT7/rB/SR1o2VPC2EKlCPUqmMNMkbPVH8x1EjbRtU6WitbbAJsNVQ7JPl/U+Xz/ENNC93+spzU3W4H90CbF3+XhL1fB8PrNVJnrrxg0YBnIOGs92JItWPoPUEzkIXoq+iyuj6bdK6gFIJRD05C6ALzRyV10zXMF9XAZ8rf8+LRonc0t933k9a06NAzV3lmNkNNUA6ne99QyVP30GV0QeAjTrct28Dtil/f6QcI2N6vWa+NmmcTxfPk4Pch6ZBozZmQ+fA40tZ/xm4shxvm9FHQ7CfNH+MAtqblmNvg8pzo6q/G6T1WDn2v4sCF0v28Zp526RxCZoKNl85B/wLNTrmL88vRzk3DXF5j0YNob+jxvi9qJH4yWp507zxcT8lwELPwrp7oeHkrUBL23JHlfBNUIX/MHS3nz3RIqug6UKfa5NG167dXSr3w9BCYb9C58Zlez1/cdlX96EEWRqkuTlwfx/bd6fUJWgWqPk5Chp+E43SuZjK2h/lPNHvubPsz2ehes5GvZ6bGQXx9u2gvLZG15IxjNsIWQM1HG+ggznXA/zOlqBnTZsz0FD+v6Lz54dRnXgPyvW86f6EGo53ouvU53s9NwkK1vUbjGCQ9ck+9qMHyt/Vsl4eBbsvQSOqhuI4+RaqU7yNrk2XoQDRFqW8fkZPfbPJueQzqNF/LSUIVXluchSgblfWV9HTudNaa6NaTlsCX+7gM26O6sjHl+9xGxTsWQWdkz9Kg06NLpb14yjQeDqqF7xYyv5GdN2atIPybgWP
x1S2tUb2N9q3J4SfYc/ASP9Bvcc/LTvSueXg2Y+yYF45UX206Umzj/R3ALYof2+NGliTosbG96ksPtImnbWB+yqPzwa+XnncUf5QxfPfqAJyOBqePEMnaVTS2h54pPL4u6ixdRsaet1v/lBF7l1U8ViBPnrcUAS7bSWhvHY54EeVx9OhSuOm5fGyNF90ag10kZoLVdg/ihr9+6GelCkapBFoiPJ5aHG4SSrPbYBO7h01+lCjcyUUNb0PVYgaRXG7tU+W9y+OLrwfRyfq89Fq2suX5z9M88UnV0cV/I+hQMitaGjqfuiCuvlA9s8B7tMzoR6Yz1IqcOWYewdV/HdDPcd7NkhrebQA0zzl8Tlovu0F6IL/kYZ5CnQ++i5q5E1TeW6Nsh81rrxW3rs2ash8sOyjf0ZrXUR/x22vsrqiHBtToJXrd0CVmqdpOOIAzY+/ovw9Cl2IfwzsWLatTK8GSc1n6dp5sgv70QzlPPFJFHQcU8r4NdSg+Vopr70bprcBcG3l8RdQ5WzF8rjRObK8dk7UYFgeLUo7FjWOWqM8lkXDydulcS89AYvJ0AiYc1FgZOZhKu9WT+5+aFraqujctBdwZIdpRvlcp9ETCH0IVYwPRKMZPtwwrcV7fX/ToGvv8TTs8aSL1+4ulfkiZV+eEgVm/okW1Nu11/cyfYfp3kEJqqPpG5ugXtSVaBggQ1N3rqk83hA12B+nZ0HaGRukcVM5drdB5++dUUNk0bJ90w4/252tYwsFt/ZCPdCt0Qbz00EwcYDf2zfRHXlAo7R+iRrSz6Nz1f1owc2OFntDnRrrlHPKFajO8zU0NQw0Qmfbft4/qPpkH+ndU9mPPlLJz3Jl2yydpDeI8v4AukYfgK77l5b98O9oxMPlaIrRmA7SvL3sP8eXcm2N7GgFMg9sU9afp9Qb0Xn7IRqO6m2Tr74Waz15fJdx77Iufx9Cz7o7X0cB36PQOf29RYYbpts7eLx9OXY3brJvTyg/Xhyxva8Dd2bmuqjh82N0Md+pLJjzCTRcrqN7/bZk5veAq8pc9vtRg+2d1Dzmn6F5t00sgXryWgsiXQJsEBF7lP/Taf4mRQf2D9AKrmsCX42IVcv/OCiaL4IzEzBHRHytzLOdEVVwz0O3l1m+Tf4+gXosXkLfwd4RsUiZI015/4tZ5uE28CfgtIiYtMzV/Rs6ea1dnj8XNVLaysy7UGBpf1Qh+lVmPpOZx6BgxjwN0sjUvLlLUBR/sfK5IjOvR71Ny0fD2xyV7/8FNJzwQXTy+hfwgYi4sswt7C8/3donQQ39wzLzzsz8BtqfPoxuM/hZ1PhfsmFar6DK/SdQL+jLmblbKevTUMN2SKTmpt2OKnAHlHUAPowa7OeioeJbAAtG+9v3zIoqZZtGxImokrYZOvc8jYaLN8lTpm4rd0ZJY9WImLHMAb0LjYhZrOlxW9nfnkUjJ95A8xD/ixomF6EKf7t8vYYqHAegnrfnMvN7mXkV6mWYpd0+WTwDHF/mEb9bjveT6dkfT0MN1f50+zw5WK+joN7pKFjQuk3l8ahh9BbqoZm34fH/GjpftBZr+z6qfO4ZEdN0cI4ktUbKHpn5s8y8GQWy7kTn8pNQQHKaNsn8Ge3De5fHq6LK+Q6ol2eFktfo893d9zoq74ci4ljUUPxFZt6LelMnR0E2QgsfXxhaiLBWOe7+hc6xF0XEQSiYuBeqIL+GbjnY5DM+D0wVESeVecB/z8wf0bNAapNbqHbz2t0Nq6J9cFM04mBrSmAjdJvKyTPzr8CsoXWH2oqIPdEUhNvKcXFOSXcT1Os3MzQ6nn8BvFrWTQHVTa5Egd8Ny7nzL23S2BA4vJx7F0YB9j+j69XxwLupRVibfK4o+8kjaF8ANRYnRQH8HSJizcx8NvtYA6PL7gSWCq298zw6Xn+CguULoyDXgmix5aa3qPso8PvMvLWcU1ZEHTkzoNtfLpGZR2TmRf0kM9j6ZDU/26N95a6ImAkdr6uiYMHuEbFoZr4yRNeFJdFoqCfRSI59UNDpVTQ95GgUFGt0G+XQQn9/ycxbUCBiDXTHL1rXgcw8sk1Zr40CSGTm19E16eAotyqOzhaLrZ7nL0R3qfghPYu1zhURZ8bQLNa6JPDb0PpRr6Dy/iMKhm8FfLwcX6OBC9pdA8phOxk6z91XNp+I6qRTonPJ4g327QnDcEcuRvoPiiCf3mvbkuhCflh53NEt0irpzEo/PS5oB+xoCDbjDiFataTxhYHkkZ7bEI1BcwePRg2jW4GHO8zLh9Cwq/9QiViii+J2/aQxGlUuZ6mkcx4K4GyMViv+SQefqc+eCDRi4Huo0v39BumsT5m2gkYY7It6VS5Bjb49Kbfh6yRvaE7hk6h3bz50kdwLuKmTz1f+3gw1+K5rlTGw21Dtk6iRNn0f+8LUqLL1Wyq3UuqgnD6Oot+LV7btSYN5yN38QRXyn6Ee3SvRMOwp0RDjM1Dl87KGac2HLtB3AatUtn8VOL9hGpPTM+d3s5Kvo9AIjSVLGd01wM+6StnHb6BniGi/w6V7vX8MCvgsVI75ucv2XYDrG7x/UUoPQGXb6LIvnVuOl8bDSenieXIQ+0913unyqNK6AWqMT45GnXytk/2okl511NJkKOB2B81H9+xCGVnUq6wmQ+ekG6n0jLdJa1lUoXoYBZu2Ldv3Ar41zOW9JeW2uZXyvhtV0A9tcuwx7rSG/dF6FDfRM6rhSODoDvI5MwpuHdbaH8r2U2gwgqm8dlDX7i6X++zle98eVdAvQ3f42BINC76ulHfj2wui0Us/RWtV3IQa7q3/dRXlHNXP+1vDh6dC5+pLynnkfsroEHROX72/NNA56COVtI6j55rXCtytNIAy260cr/sDB1TS+wrqYR/v5yl6bpn5Kpq+cSu6Ps2LgshHoYDg1h2kORMKSDxUvvdLyvbpUKfQUeX//k8vL12oT/aR5mIowHYZGrHQGmExNRrN2GikV5fKe0pUB90J3br5cnSOnAQFxecp+1eTUYyjUN1hrsq2NVCv+o7l+dqRC2XfHkVPfWLy7Dm+zmSQ5210fbsMta8eR+eChSg980Nc1juiwMpO5bnTUIdb42tAJd0T0GicgyrH7XRlvz68bt+e0H6GPQMj/QdFM28oB8vyle3TlhNpv/M7+0n3UHSxehv4Th/PH0EZltsmnTFl55++sm1Sei6Mn0WN7H6H21XeOweqtMxcTh7VhYImK//r9dYJpU1arWFR81S2LVX5e5pyIlu8QVqT9nq8HrrwvINW427y2eZCQ6I2rmxrldMYVGl4kzb360W9zOMM2SpltRy6sF+FFt6ar0GeZkLBgi+XE9jM9AQxrkEVj3valTfjNhRaw9DmQPPIHxvKfbJSRveU332eKNEQvA82SGvOUh4n0zO/fYrK83OgoE3b/agbPyggsgIKHn4bXRSqx99DqOL3C/pZ8A1VOtcsP615dItU/g4aBmpQpeL68nNx2TY3qnieXY6VH7Xbt8v7ZkeVuLGosbJm2X41GlHTtJxmRA3Qr9Mz7HYUGnnwGIrIP9lg354XVeqm6+O5Uej
89t/+PhtdPk92YR9qTd25BlWgvo16S1vf/afK8TNXu/2ovH4utGJzdT2UakN56nL8tL2nedlvXkTnnjkr5RyVv9+gn+H3qGJ4KOPO9Vyacc9Tv6HXootDUN5XoAr1J9EiiK1r1KeAe8rfm6DAyEN97XO90l0ONTinL49bC/feRM883ifa7VfoOrAgPdPlFkLH7ono2D0VjWKqvU86Xbx2j4fy3watA3AucEJl+09Lnt9lAPPI0fX2Xir1sJJev4sGosr8LPQECVZBAdbW44XR6Ija75+e62x1gbepK38HahQt1MHnqR4fe6NgysP0zOE/ippFAbv4Xc2LRgUthq5126I1QaZEo0Z+C/y1vHYUHUx/Ku+ZAk3l2K7s11OU7UfST4OULtYn+3jvZ9HUgGrax1ACCUPxg66Xn0XX6TOoTE1C14Idy+fraNoRCjy0zt0bojpev2u1Vcp67j62LVj2ydNoOG2i7D8fRWuJtKZzLcrwLta6GQpontlHWR9Gw2tAeU9Xg8cj/WfYMzAh/KAL8tdR4+UINCR5I+DXA0xvMdTQmQlV0m6gMj8bNSg+RD+VhMprv46Gs15Nmevbx2s27CBvF6GhpZtUtlUrjcdQGiYN0joOReAuR43pBSvPBapUNZrXVDlpVXvUD6eD1VfRkO/X0JDnW+jpKViq/N4O2K9BOmfRE01cGF1YL0C3fBonvw3SugpVDr+GKvY/RY2t0ajyOAfNKvvnoQvC/L22z0ZP727tnMhu7pOV/ejgyvGzPOptmr9sm4XKiu9t0rq0lNFO6II6Hxrx0Vrka2eGaJXa8j1dhypPv0K9Qj8seVqBnjl721MWMeonre+ghuOT9B2oORi4tIPyPhQtZngSaijtVymjSWi+yNvVJY2tyr74CAogzEpPw7ZthQEd92ehCsaFjDuS4pOoN7VJ4OhCyjoEKKixVPnOW/vSGugWg/2l0dXzZBf2o9+iaUn3o3nff0K9HOuX51dEFa3tabAuBQqkvomG4d9HOdcybqOm7R05yusuQIGMb6OhydVeq9aIrX7n7Jf3nl/+bu0z1Ybsp+lwLYEulfdn0bntNTREdTe0EOmKKJAwCQrC/IU2o7NKuveie72DrpOtu4XMgeoJG9CgcwFVUr+NzgcvoR64yVElfc2yH/R7rNDFa3eXynyV8vkPQ5Xx1iiI1rV8F3S9W5CGq7KjwOrh6HrQGolRrRMsTYO7F6Bz+KWop/Fxxl2kdfKSfrvgw/EoEHImZdRK5blJ0LWqcSMfjQrbp3KMjUbTsK4v++9xqME23tarQI3XW8t++AKqK/0CDeP+Eqrv3EvPegSNek/RtWMlKovolc93QvkevoMWcq79bHS3PrkyClbtX02j8vdyqJHedp/sQpmvhwJrT6Pg9Y1ohEcrALgHujacTJs6RUlvCVQP6fNuIuU88Dv6udMXuv6fW46Ta+m1rgEKLG3SwWe8C9VPrkDXu0PRcf8JeuqnQ7GORKusjyj78e9QMKO1Zt0e6JrcyTWgK8HjCeln2DMwofygaOvHyw7/W1Qh7vPWOw3SOoHKRQkNub+LnuFAje/cUHbwM1GF6GzUaDmsekLsIK2PlYNpM9Tj9D/DtNCFpclKxWuiSsH86KJxD+qlOgZVZqZElYo+T14oGnknlUWFqCzGhnoLv0ylUtvwM34RXYx3Rj1ev6IyDaDdyQtd7A4Gdi+Pf4IWmDkezUn8Rgd5+QDw08rjldEF4zRglw7SWRWNpPhOOUEdyriV9La3JuzyPjkVaux9rDz+cdkvz0SN5EYBg/LehancegwFN36Egng3UII17b63bvygUUZ/QBefB+np7XoDVehOQpWuFVr7az9pLUZZXbfsU61exUPR/O/RqOLQtpKIAj330lMxf7qU9xklb9t18BlnL8ftdJVts6JGzcU0XzR0dSpDolHA5Ur6ud1kTTofQY3qj5bHF6Ge8PPKvt5aiKjfCixdPE92YT9aG1XGVi/7zQLl8X3l+Gjd2SGqvxuk+wV0TtkP3aryKTpceR1dA6qLR55Wyq3Vc9LkzheLop6aVgNx+7LvfBedf0ejXsehunPF2oy74OACaIrRX9H5/3oUVDiCntv7tl0AthzDt1Yen1W+w5vpYDE8tIZFNZ3j0fDwmzo43rp27e5Smc9Y9oHDUeX8+VI+y6Bz+rxoGPYyKEDZ6C4tpXwPQuf+01HAdjrKHZdQj3y/gbZyDmiNLgnUi/47VN9oBdza3SlkXXSeXK18XzuV/Wybko/Zyz7fyV2sfkxl4TTGHb2wKApujdfFRFHAZCyqmz2K6gDnl5/7yz7ZOhd3sgDhj9G55N6STmt60JLovHUQsGo/7x9UfbKP9K5CgYjvUwLGVM63JT9tG+ldKO8ZUODqAlS3eQl1RlyBFpAcjRYRXobm54JWAHG3Um6tW7lOW3lNbVroXPIY6riaqxxTf0X1m4FMed6Icc9ve6PbMV5Fz1SI8T58v1LWO6FFH19Hwfa70DVgHVRH+Xx5fdOF6bsSPJ6QfoY9AxPiD23mB7V572jU0Fuy1/Yfo8rUCqhC3PZAQlHt0WjBsm+hBsQeqGftUjpvVN8PfLr8vXo5mVXvb9vJhWIfKr33qCF6ELqYtlZB769hdV0pk9tQ9HupXs+P6e/9fX1n5fcalDnV5aD+K4qq71fKskm5L40qLWsAZ1S2L44q2U0vXrOhoYit1ZsXLZ95RdQwXbphOhsCe5W/P4qGPj9cOQEeTP9D5kehCtDivbZ3vE9W3vs51AhejHFvdbktDUZ1VF4/BwrO7Fm+o1+V7bOj1XB3a7cvdesHVVKvQhX9DVHw6i7UI3MRPfP3m+zfRwAHlb/XQw29j6Fg2E00CD70Su8UdB44ibJacNm+JgoodXLsHkavW6KhIMbJNJzPihqLrREngXrx7qWn0nkYlVue9ZPOcuWzHYjmot5Wee5rlAZSm7IeTRfPk13Yj2ZDDZ/zS552QueNBct+sFFlH+l3ikJ5XevctiU9d/hYClWMXqUnyNnk3LYZlTsllH36MjoY0VOOy7+iBvpqaLrUp9AUgfcaHsNQ3l8px3CrvL+JzrNboEbJuzSc9lbSnQ4d/+ugWwBeULZvi3poaxtCvdLZnMoQbTSaap+Sv0Y91nTx2t2lMj8GBRvnRaP7voWCY39CjZJrqNxVoWGa6wF3VB4/ggK111IC7TS400Ap37PL39Ve5sOAUxrm5buUOx2ha9MT6Nx7fus7o7PV77/IuJ0Ih6Lr7z30s85Cl7+z0eWcsw8aLXIVqucchEYz7lTydS0Nbw1c0t0OuLny+GhKnaeDNAZVn+wjP7eUv79ayniZoSjjPvJyCLq+nYsCTzujRZdfAB6svG7zDj7fiigIcRZlTRkUtNmg4fs/D5zba9us6Pp7Qif7dXnvuqijp9WgXqucD/ZGAZJG9eVulXXlcavz8DdofaqBXAO6Ejye0H58V4UBSK3o/e8Bvn1UZt6bmU/Aeytfg3oyP4FOkN/Jshe2ycd/MvO/qGE4WWrl25lQQ+tXqFLaVlkRdCZ0cr82IkahhuczwJkRsX
z5f52sLHs/sHlErBMRs6OK1eOoMrF6RExb9xkjYvryv7+GKrIPAxdGxMnl+bmArzQpo5ZW3lOry18bEeuioXffReW+SGb+t780I+IDETEtOtH8o3yWRVvlgyrKS2bmOw3z9BJq1G0dEY+jk+kPMvOnqILd6G4DmXkdOmGRmQ+h1aWPAbaMiGfRIna/7uf972bmPWgIWXU1/d/T4T5ZcQ+qwN6Egg8t75btjWTmi2j//iyap3tx2f5n1LO2WnncSd4G6nkUod4RrZZ7AmogbIAWaVoM3YFglXZ5ysyDUCS/ZevM/Elmno0uYGu2SwPGWaX4LNQ4vw8NdWyZCd2isJNj9/vAuhFxX0SsXvLxKgqQbNgkgcw8B1WkSfknChwsFBGroXUbftVfGhExZWY+jHotX0BlfEjlJW+h4a/tyvq/lfPk5AM9T3ZLOe5PR/vS6mgk23GZ+Xu0P61Zyujj/R23lfRa57bLgIfLyuXboZ7QNVBAqsm+NF9mXpmZN7f2q8x8Bu2nG0TEkRExdYP8HIrmed5Ufr6dmTdm5umoF/rj7dLopkp5fxg1gNZEvY2voilToPP5tcCRZaX1Jun+DZ1n10WBzJvL9otQw32Jhll8AFglIg6LiC+ixtvDmXkYWmn8A3VvLNfuGdGouW5du7vhcXS+/jMKHC6BAll/oGcO9/YR0eh8UvwV+EtEbBERxwNvZOY6qFG0Y0R8OpvdaeCnwLQR8c3WMVH29yOBOSNihYaf74iI+AI6P30xM3dD1/M5ymr8ndQRFwPGRMTG5U4Bi6CGzVXApRGxaAdpDUg5R16Pjs9tUQ/tJmgk3GGo1/tw1NP/0Q6SXhJdK1t+htZ1AqDcHWFUm7uO3Ad8NiLW7rQ+2YePoqAD5Zx0M3BKRKxU8tPRnQIG6V50fFyIgmpzo2Njd2CBiJgyInZH00Ybfb5SfzwT1QnGoiDLl9HdixZocHeXG4BJI2LTSpovo/r4TGjURyfuQyNC9o2Iw1Hg6ObMPA5dw5ueJwerVdatdtfcKPD7AvpcHV8DUB10TGnn7I7aYaugeuoJrTvZTHSGO3LxfvpBlblzUMPzY72e+xRqVN3cMK0Nyu8x5ff+aPjui6gSMxkD7Elj3GHu30A9Th3fuxU1zP9Q8vWdyvb76DUnsI/3jqIyrBn1xp+FKp5/p/nQxlGo13KJ8ntK1Ov0NLqXdNPFXSZDjeGNKts2RsMbz0cVyPtQZb9dWpOiHvnR6EQ2M2p8zl95zZO0iXyiESfLM+5CgdXvbip0ke83T9QvXrheh/tkoArPfPQMU96mfF83okrI45TF9hp8tk9T5nyXbfOgYXzfQY2Ax+nwriMD/aHnXu27o6Gtl5Tjeery/DWo9+Lednnqp7xbQyVvooM7FvSxn16Geqs2Qz1hTcp7NGqEL4suqJOW7+6p8t3thIbz9ruYXfneNkJrGEzX2lZ+r4UqJQ822CcnL8fVApVts1XSCtTgavvZqscFOp+diyoLgzpPDuC7mRU1EFoL4E1Zts2MzlMroWDdTQ3LKNAohTnQAqKj0dSOX5bP10mP52Qo2LtRr/Rb++SyqBEzR5t0qvPNJy3HyBS99u1thrG8Z6Gs9VDK+8/ounJ/2bYQZSG6/sq9/G5N39ir7Is3o8Daauga325NgjFoTZJZ0ZDgs8v+2erJHoWO38bDXHuV/4Cv3V0o+3nL/vRkOY5fRsGbvdCaMHOhUURt1++opDkJOv+eRznfVp77JmXdoYZpzVX2xT+g6/hU6Lr1GxosIFvS2BcFMk9l3EX1HkbB2k7KaxoUpD8fXbPnqzx3EkOw2nzZH0ehwPdYFEi5FZ2LjkFB1qXQOartSKhKuktSWZATnVceRees3Si9/w3S2bF8X+fSYX2yVzqtofvVa8n+wInju4z7yEurTvMsCoC9N00QTYk+pGxrtzhu9Pq9CaUnHNVRL0Sj677cMF8blTxdz7gLJP6SMhqyQRqToc60RdE0z/3RSMTWOj6To/PDgk3S60JZz4YCBc+iUR4/RaPGDkIB+8fK69peA3qlux7q9DuayohMFGzrcz2lCf2ntZPZeBYR09Fzy69l0IXiIeCtzHy0vOZa4NDMfKRNWnuhRuUnK9tmQNHXhzPzzA7ztj3wTlbuLxoRk2Xmv0oP+7loiPAZDdJaBjXqnkr1fhMR06V6Z4iIsejuFLX3oo2IUVnTQxK6f/hHM3Olhp/tO6gSE2hBrH+i4Vv/QL2Pj7Y+a5t0jkUn01FoWOnZlec+iSqgmZmPN8jTt1AFb9+a53dHIxf6jXpGxAmo9+xcNMzxwV7PfwE19LZukKc+yzwirgPGttsny2uPRw2ZxdDw1GfRhecPaNTCn4GXMvPGujR6pbUYahRdl5nfLNs/hHor5kP75HHt0hqsEjVeCQ0n/QPwpVQvZuv5TdDie3ehi+AWbdLrb//eGdgsM9dskK99ATLzf3rMy3Hyb7SOwvcbpPVdND9+WhSQAc33ux9V1t8AXszM69ukU/3ersnMQyNi0sx8p9zn+PfAI5n56TbpnIAWG/pi6L7Oo1CF/ufl+a+hY+TL7T5br3RnRxf4hzLztE7eO1gRcQuqMK+OzpFfT/V4tUb5HIjuDrEz+t767Y2NiG+jBtC8aDTI06gxtADwZmbe2+TcVtKqnt+Oysxz+3jN6pl5d8PPOjrVg1ndtjOaNzokIw4q5f0BdJ68EfVaP4eO1fVQkGwdNILtlobpHoqCTlHS+xaqBO9Sts2Evr9+r8MRcWZ536zoFmu/7vX8KegatUObdCIrFbnW+aWMDjmfhtfubii9tZOj6+uUKED+KvoOlkHT+WZGPaAnoAXWftFB+lOj89qC6Di5HgWmz0a9sr/s571fQ+emV9HosfNQ7/qRaLTdJGgq3AFt8tC7vL+Mprvche5esmhmbtLw88xEzwKIifbH+VrHWURMic7BW2Tm003SHIiI2Ac18D6Irtv/ROXye9SpdRG6LvwV1TGP7jD9yMys/N4d7R+fRrfOrP1sfZT3NJn59/L3WNrUJyvvmxt1OhyQmb+vXocjYg7UsP4HCtI0GbkyYKVePSkKFsyMRhn8CZ2b/oT2ifNQYOHwzDykTXqtcp0+M18v23ahHHOZuUpEzAn8KzNf6yCfJ6LA2s3omv5GZn6u4XsvQOfDpdCUwtsqz41Co32n6PQa3qlWWWfmqxFxJbpW3gY8l5l/jIg90Dl8o8z8ccM0W+fY2dH55CA01eRNFEh8HXUSb9LfOWmCNdyRi/fLD2pYXFj+XhrNZT0LnZgvQQfYfA3SmRpFIluLy6yEehY+Q2VVcJrPh5oJnax+hoYor1e2V3v7Z6PZIm1LoGDIFajHc79ez8+K5sgNdCREoOHFTeeOzku58wWqzOyJ5o+9d4u5huksTplvhiqgFzDAXhxUcanOZVwHDZvdm9JrgYYItl0BHw1DvhNFpb+Depg/R09Et990UC/LBxn3tm3v9TKWx23v6FBeNw/wf+Xv6dDUgiNKvvpdgb2PtBagZy2DMah36WxU0fxy2T4FQzRvFy3ksxMa/vsEuogeXY7fQ
8t3t2q746TdMVm+ry/RYG0LVMl7vRxn9wOfGcTnm7Py3U2Gzim7oB7CTw/yezsLDZnfoWxfkTar+5f95zbKGgjle78MraZ9Sfk/0zc5RmrSr/agDMk9ldECcDfTM2T7cVTJeJxevUloFFG7Xu8FUHAWFIQ+vuwHY3un1yBvfZ3fFm6VTzeOM3T+3bLJvt3N8q48vgw1OH+HAq0X0nMbuLbz4ivprF2+s9VR5f7bqBeuNce+6d101i7f1wfQSKWD0RSz/ekZHbMhg5z/S8NrdxfL/bsoaPw4Gn11J2XkDFoU8RE0jPdE1BhukuYK6NxYvS5NhgJtF6GpD19vk8Z6aN73VvRM47mRslAvug42uqbQs6ZINT8bopEUB7c7dnuldTU1oy5QEO9ExnNPeMnzZaVsXgH+hUZX7otGMK2BgubT9/7cbdId564HvZ5bDY1mPKqD8h7da3tH9UkURPsVsFOvMm51oM5MBws3D7LMz0PXg1+i68CblHUF0NSy81Bw7VoaLLCJ2gBnlv2ldceL1VDgtPGozEpZV28LOkfZvxel3MmkQTofR+e3KdC57jjUeXQiPSM9PsIA14obQFnfhEbMvYDqN6PQuXetcty2Rnk13bePRAGCc+hZ4PGDaFrfUaje85Wh2JeG42fYM/B++UHBglNR5eA+em4vNhW6gDUd/jMd8P3y93zoQnwUqghdCMw6gLx9AUXMNisH18mogT195TVNFta6qnVSLieFR+g1XLPuJNg7/T4uEq2TTeMhgOVAvpmeBvm0qJLxVTQ8rFFZocpQ6/uaqTx+jAaLu/WR1ukoQjmm7BP3oh701oKGjYalomHJ06AG9Y5o8ZyT0AVob3qG0NcNi1+ylMGF9FExQUN696fhKtz0LIK1SGXbfGhBw5/SIChWed/HUQ/MNOXv11DlZafy/a3cabkP9KccEz8qx8S76GL8ILpVz3koUn0fZWHJ/o4TNNxztz729T4rRm3y9TnKfYHL3/eVfLZuMfp5Gga30DnlGiqL1pX9fEtU8W+0n/fzvX0FVYAaf2+oIrQJPXPH50bTYI6hTONoU9bb02sRPspisq2/h2ofKv9vk7Kv7FuOuW+goM9PUU/lfOV1KzfZD0q53kBPZX7GUsbfRkGX6forn15pfZdBnt9QQGiGXttG0UdFdCjLu/L4CVTJ2xudJ39EZfg3DRt75fj9Tvm7NU1wGdSA7eROOIfTs6jbrmi0yD7oOnANCrS0CzROXvm7v2NhqIJjY1HQ6RMoyPoCWvz3UtRw/yKwRodpfhKdd09EAYTJez0/L5XGXz/pHERpoNMzymNDtGjbOpXXNT1m+gwwdLKfozrSY5XHny3niOPRuSrQHXbG2+JxqP55N5qKdAPqgDoHjaz7B2psX4g6AN4b2t8g3dbU21MZ95aw750PynHYKGjXT3lP0fD9a6C61uooeHBMr+eH5Bgp/2ssPbeq/QIaJn8lauT/tZRb6+4ubb971Pn0IBoZsAsKzA341n+V7yh6788dHB/foOeW5V8t54PN0bntCQbQTulCWT+BRhpdis7X16O6xZV0MKWAnjt8rI4CIruXY3mNodqHhvtn2DPwfvlBFblj6bm4Vu+1fDVaXKdJOkHPCurfohLVQg2ZLw0gbx9Ejb6PoqGvD6Khe3vQPMI4D4rczVHZdhLlXt1ouFJtryi6gE1NafhUPmurcjZ1J5+pksaR6OJ3IloV9sCy/WQarlZMH/N6UcXv3L6ea5PWTMABKKjyX8a93eTplBEfHaS3CnBC+fscFHw4jnKXhX7edyMagbEoilSvh3rRP1aeXwhYqcO87FdOyBswbqX2RDqcs48aQI+jOZa7VbYfQq+L/vj8QUNiL0cX5wPRsNhECwXdVL7P4yl3sOgnnQ+jivQFwEJ9PD8zpcHQQd6qx9qMqOfoZ6gC+H90dieFXVAg6SuMO9roSDqbh1z3vX2TSkOuQTrrovnGNzPuatqfB65o896PluOrz5ENqBf2R52UdRf2o4VQxfUEFDz4OaqkfxY1Gs9FlfMXaFYxD9TAPxyNZvsB5fZhaPhrJ7dPHdT5DVWa7qP+1rqzDmN5H4vm1b+EelRHoRFxhwKXltduTMPbcqJe85sZt7E5BgUUfwDM2TCdDVADZg80+rDVUJgCXcf7DZCj89HxqHfyf8od9aYN2doGaNj1heiOBVeggOu3UM/12+h6+wN0jms8Uqjs42PRNfMudI5bhJ5blja6NSAaEfYgsEpl27QowHgVbUZlAB9CgY/TGbdDpdWYXqy6veFnW5aeu6B8qeyvn0X1qMdL/obiNsMHoIDavigQcys9t0/cEnVwXEplpFaDNM+lZ/2H36GgWLUXe74G5b0zqrv1dX76AB3cbaJ8po3K30ugxmLrblZdGVXVMB+tu/tsic5FU6Ch81eXMnsEjQB+runnQ3X31mebBNXB9qw83+8aAg3KemFgtQ4/5wpodPChaH2T6vnyNIagkd2rrKdB56en0B3bXkMdbtOic9XlTY9fSoCt/L0Xun6fiOp2xzEEoyiG+8d3VRgimfm3zNw3M8eii+gmZYXY3VEF97yG6SQ6GN9FvXCrR8QC5elJGXcF+6Z5+yUaArwoOonOgk70S6MFBZuk8RxqiP6tsvl0dJEHHVCT9ZPEmag3ce+IeDAi1kxprUy8V0Rs2+wTUV1t/hR0IX4b3bHgyLJ9aTQEq63Uyv6tdFt3HDgTNSAP6fX/2qX1WmYehRaeOygzry7zvUAn6GmapFNJ7z7gX6VsPobmKV+Heor7FBELoov4Cam5hWuV970FfDciNs/MZzLzgU7ygvbrn6GI7B4RsX7ZvjI0O9dUyvFo1LA6Hu3jK1bSGu9zxir5+AOKLm+Fhrg9gBrTZ6FRByuiC2+7FbQPRxex3wDXR8Sy1SdTdy3YJ3X3gUZ5q+6XmfmXzDwczRldE+1bjVdSz8xTUaN2CeD4iPhCREyKGvAvN80T9d/bKqiC1DQ/t6Dext8Dh0XE18tcxS+iRmh/dgbOzMw/R8SKEbFvOadsXtJ+Cc25bFvWg1X5rp5BFZR30Ln7sMy8Ch2rgQJ+X0LfW7ZLs7zmIjR6aQ7g7sz8TnnJB9G5pJEunN8OA85JrWOxaESsFxFHRc8dOV5m+Mr7XVS+u6LK+RjUyLsCmDJ0B5+vo/NWW5n5W9Sjd3ZEnBsRs2Xmv1N3pVkMBUmapHM9qmy+g86bb5bt/0CjwSZtk8QaaAjwGsAuEbFIREwVEQuV59di6FYrJ3UnoVvR8fks6mFcGzWADke91zugaTpzNkmz7IuXAqeWa+auKJB7EvDJMof72PL/+z3XZea9qGL/7dDdK2bOzDdSd3+Zl/bf2zmoY2MWVEeZvqT7n/L8xiho0lhq3aDfRcQWqAGzT2ZekVof4R50C+qhuBvGXWiE2HKobB9AgfE7UL3gCdRQn6lJYmWdiyuB0zNzFzTKdn3gpxGxami1/nZrE52L6otR3tf7TkxL0399spqfaVGA5kdl7ZVfoQDWnhGxdqlnDsldR1LrvtyHyncvVOZj0HHyf2gK2nyoLvzf
mmTeU9b8uAt4tlwX/oMaweuW5z+DOgX6066sl0KjdBpLrbd1Ctp3jkbXkpbl0Xl5vOpV1l9Bo3OnQNOefoY6GN7MzCtRx+ds7dKMiGnQ2k1XlfPTOigAtTuqP83RJJ0JnRdHHCYR8Q10cD+K5mLe1uYtvd//YXSxWhJdjKdEvR0DWngqImZDPcbrAxdn5mERMUe1UtkgjfcWsYmeW9qcjebMzZKZm9a877Oosr8+OqFcgsrmBjT889WImLxbFc8SrNm4SVn1Xpin13OLoYt9v4tX9ZN2dXGeL6Je+bUHkM6nUOXze9lw0aLQood/RRXXJTNz+bJ9M3Ri/0a7BkxNutOiyuwSaCjX1MDTOYgFcCJiT3SSnwP4S2Z+ZqBpdfA/Z0QX9Pkz88GImAeNink7M/8vtAjoRWhRo02zn8UMQ4sn7tL6biPiAFQB3j8zX4l+FkscQL43RqNNGt/ysvLeSVGjc0k0/eFdtEbIHgPMy4C/t8piT9OioMNXUW/8y9l+oah9UK/mNyPiYVSBGY3Ob1dl5n4D+TwDERG7oUrJ31Aj//vZs7BXa/TYKBRQ2j4zP9Qgzep5dpJK46X12TfNzJW7kPe257fQwqHfR4HQ+9F0iedRsOlTwBGZ+cPB5qWp/sq7PH8qmgqyU2hxyfWAe0oDp5P/MzMa/v45FJCYHvX8t10Ur/c1pVz/DkENtURztjdrk8Y86PxzFwo2v4kafxdn5lmdfJZuCd3ibA/K3OWSt2tRg/NkFNC8H9g5y8LJDdOdNCu3OI6I9VBj6BNoPZ9b27y/erwsh76zNdAQ/SnRVMb1+nn/PmhEyKYRMS+qlzyMGlQnZeb5TT9L7zxFxJqoV37akta3ynnhEWCPbLgY6UCUINMYFNT5E+o4egV4LTP/ExG3oSDtVKiO0HaR5V7pj7Mwa0RshY6ZxYFPZubNNe/bA4183KQ83gHVbQ/r8CP2lXa1zrUT2hcO7bT+3YV8fAaNOhqFppbNh0bl/QmNFGp0S+9Keu8dIxExBepd3wMF3sZm5h0179uDLpd1H+e3j6Og3a0oODFFZm410PQHkJ/PoGlJgRaKvSgiVkZBspVRAGepgeQpIhYuweTW44fQek4/707uR6gcAcMeJtYfVAFfr/zua0GdxnPX6FkorPeco2VRZX9NKrcE6jCf1Vu4nNOlz95K89uoAVI79BLNXd+n8ngTFEj4NvC1QeRhdLW8K9tXYADrE1Q/W1/pNiyP/5mPiYaZjwWWa/KZ6va1yt+1w+7KvrgqqnR9t5T14ZXndwGuHOznogxHRaNiGi8+VpP2fCXPyzN0c+OuQY26C9Atg1pD0wL1Bh6NKkG7UpleU5PWcsAylX1yHjSX+dz+vqsB5vtDdHBrqjblPlMn+aPXHNiBfm91/5NyG602710QDVM/E42+OL7y3CxoOGajebFd+C42QMOjP4lWXX8QDQH+cjlepkPXh9FoqHsn67dEzd9rM8gFCDs5v5WyPhwNd76ZMvS6PPcl4OChKOs+ynvzSnnv2Lu8K/l7lwbXzWqZVPfPcn77KrqmTN/J/t0rnWVQAGZHYOaG6WyG7sACGr3wNzT6YxOGcE2JcoxvhxqDlLKerFLOd6K1IfYHLhnE/6mW1+HAj5vuz70eT4amjh1V9pXa+eDl2DyAngXQDkM9jq397Xd0uPhvzf9p3dnh26jRd+54/s4+hxpyx6C53mdS1ukpz7duv7oYuv7NN4j/Vb1F9OHA9f19V6jB21pQc3T5rh6svOZMGk6hrfn+q+fLY6jc1nM8l/knUZ1hS3pu5TqqfOap0Loyu/eV537SHNXrceuYOwhNzzq1TVnvTVkofHyUdWX7AmgU4qb9HW/jq6xLOY+qlM8taG2C+dBUg/kG8/nKc/sN5vw2If0MewYm1h/Uy/bTcvJ9nNKwo8zzRHORPtL7wK9Jay/g2l7bxsucLGCm8rsrC/KgNRP6XTQKDQF/Bg0pWh6NwlgbDY+7nA4bi2hBtTG9trUqa43mH9H34mqjK+l0VDFDQ70m6ZVW9QLWNr1q3isnwKj83W6e5xyoMv0jNP9s4bIfXo96u75cTqJLdfC5pmDcisF78wU7KSM0paX3d1ZNq/HCgV3YZ7+AGhxjShldjCrlt5YLTati3OTY3Zk+7ueOGuXXogr/oC6kAyzvvhazG13Zl5oeJ7OgkSWT90pnIAs+Duq4RefJ68r//woaJvko6omduhzTtw3hfnQsasC21mk5GN1p5MSyX7Uaok1X4t+XnobiOMdcp2XdjX0JBQ2WRI3e/dGc9vkqz38N9YAPaXlXHh+MGionUjPfn2bB2qVrtnelvJucR2q+ow+hIe2zoqG3+6PKa7/rrXS5zL+IriVfRHd7+gWVOxahIOm56Px+G80X/+2vgj4pulYN6O5MHXy21nE1Zfk9OQo8Va/Dp1PmyneSbl/HLLo+f7ocV40bawP4XKPRdf7D5Zy7cTlPPkLP4tZTtvJAB2sb9PfdlTTH0iZQh66N8/Tadi8avXYQZW2Swe5H5fkZGeDdeToskx1QAG0LNHz+WWCbyvOzA8c1yXPD/7cOCorWHm9lPxjV+/sdRFn/T8Cz7OtDvTBuq6w3rZZ15bw5O5oq03rcqKOFfgLqdCHANiH9DHsGJtYfNCfuoPL3gWhO9NlotdEPlQvEtg3SmQ1dkJ9EleJ5y/bWhWdMSWtQByc9vYWdRBfnpbKASyVPrbT6PSEzbg/CWijQciLjLqr2cLsLTeW1S6C5q+ejqP2HeuVrZtRo7rfHkS4urlby9H00VPNSei1eWfJ0QLvvD40q+Tu9biVU+XuGsp/VNrB67ZMHl/3xO6jH7Lzy9wYNy3pRtCL1GagyMFuvsp4JLYLTdrQB6t24py7vqGLcKK1u/KDAwXEooHIOCqrsUMp3z0qeTqWfAFvZT36Dpju8953RMxrjQ2guaNseRjQvePF+yqjRvl1e25XF7FCD8T4U3Lu+d97K52/7vXVw3Nbmh57z5C9KWpOX8v1B+fkuqkw0XjSwC/vRl9ACeG+hUVW3oN7gfcrnHIMqrv0etyWtJdEw4rP6+o47PN6a7kv9lfec5Zi9EvWSnoPOKR+o5OcxOghCdqm8H0WB6A+VfeHHpXxb5d26Ni1OgxEC6Nr6Lrpu/885Gp0H2y5CiOZk717KaU0UFK82Ghej9Gp3+Jk/U/arG8vjSeuO6/FQ3qPR7ZdbCxROUfaJF9C1YbJer286kqLPijzjBtr7DdqUc8oWKJi0aO99ucn3j6aAzE7lbjC98jBn2cc7GU21dF+fq/fnoQuNx37yMAuaDrommiITKADySdTD3LqWL0KHt3Zt8L+n6ue5+ei1CB8914BN0Xn9IZrfIrrf/YgO7hIxyM88CgXXdqNnQdPrUN324laZoGBy0zshVYNXdY3Z2gYxmmbzAH13agykrNdFa1rUPb9I7+92PJd16xrQWnPlsVZZV773RdudAyrpLt3gNR0F2CbkHy+OOB5ExNyoInB82bQtupjeiCrlX8zMa9HJu52j0e3yPowOgM/Ae4skgnoOJ8vKPNc2eds
[... two base64-encoded PNG outputs (per-column histogram plots produced by the plotting cell below) omitted ...]
" - ] - }, - "metadata": { - "needs_background": "light" - }, - "output_type": "display_data" - } - ], - "source": [ - "import numpy as np\n", - "import matplotlib.pyplot as plt\n", - "\n", - "# generate color map\n", - "cmap = plt.cm.tab10\n", - "colors = cmap(np.arange(len(df)) % cmap.N)\n", - "\n", - "# column barplot\n", - "columns = []\n", - "for colname, hist in stan.histograms.items():\n", - " fig, ax = plt.subplots(figsize=(18, 6))\n", - " \n", - " ax.bar(np.arange(len(hist['x'])), hist['y'], color=colors)\n", - " ax.set_xticks(np.arange(len(hist['x'])))\n", - " ax.set_xticklabels(hist['x'], rotation=60)\n", - " ax.set_title(f\"Histogram for column {colname}\")\n", - " \n", - " plt.show()" - ] - }, - { - "cell_type": "markdown", - "metadata": {}, - "source": [ - "This way, it is fairly easy to understand how imbalanced the target distribution might be, along with a quick pass to search for outliers, for example.\n", - "\n", - "# Final thoughts\n", - "\n", - "Lightwood automatically tries to leverage all the information provided by a `StatisticalAnalysis` instance when generating a predictor for any given dataset and problem definition. Additionally, it is a valuable tool to explore the data as a user. \n", - "\n", - "Finally, be aware that you can access these insights when creating custom blocks (e.g. encoders, mixers, or analyzers) if you want, you just need to pass whatever is necessary as arguments to these blocks inside the Json AI object.\n" - ] - }, - { - "cell_type": "code", - "execution_count": null, - "metadata": {}, - "outputs": [], - "source": [] - } - ], - "metadata": { - "kernelspec": { - "display_name": "mdb", - "language": "python", - "name": "mdb" - }, - "language_info": { - "codemirror_mode": { - "name": "ipython", - "version": 3 - }, - "file_extension": ".py", - "mimetype": "text/x-python", - "name": "python", - "nbconvert_exporter": "python", - "pygments_lexer": "ipython3", - "version": "3.8.6" - } - }, - "nbformat": 4, - "nbformat_minor": 4 -} diff --git a/docs/tutorials/tutorial_time_series/Tutorial - Training a time series predictor.html b/docs/tutorials/tutorial_time_series/Tutorial - Training a time series predictor.html deleted file mode 100644 index f468074d3..000000000 --- a/docs/tutorials/tutorial_time_series/Tutorial - Training a time series predictor.html +++ /dev/null @@ -1,1101 +0,0 @@ - - - - - - - - - - Tutorial - Time series forecasting — lightwood 1.6.1 documentation - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - -
- - - -
- - - - - -
- -
- - - - - - - - - - - - - - - - - - - -
- -
    - -
  • »
  • - -
  • Tutorial - Time series forecasting
  • - - -
  • - - - View page source - - -
  • - -
- - -
-
-
-
- - - -
-

Tutorial - Time series forecasting

-
-

Introduction

-

Time series are a ubiquitous type of data in all types of processes. Producing forecasts for them can be highly valuable in domains like retail or industrial manufacturing, among many others.

-

Lightwood supports time series forecasting (both univariate and multivariate inputs), handling many of the pain points commonly associated with setting up a manual time series predictive pipeline.

-

In this tutorial, we will train a Lightwood predictor for the task of forecasting monthly sunspot counts, and analyze the forecasts it produces.

-
-
-

Load data

-

Let’s begin by loading the dataset and looking at it:

-
-
[5]:
-
-
-
-import pandas as pd
-
-df = pd.read_csv("https://raw.githubusercontent.com/mindsdb/benchmarks/main/benchmarks/datasets/monthly_sunspots/data.csv")
-df
-
-
-
-
-
[5]:
-
-
-
-
- - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - -
         Month  Sunspots
0      1749-01      58.0
1      1749-02      62.6
2      1749-03      70.0
3      1749-04      55.7
4      1749-05      85.0
...        ...       ...
2815   1983-08      71.8
2816   1983-09      50.3
2817   1983-10      55.8
2818   1983-11      33.3
2819   1983-12      33.4
-

2820 rows × 2 columns

-
-
-

This is a very simple dataset. One column specifies the month in which each measurement was taken, and the 'Sunspots' column holds the actual quantity we are interested in forecasting. As such, we can characterize this as a univariate time series problem.

-
-
-

Define the predictive task

-

We will use Lightwood's high-level methods to state what we want to predict. As this is a time series task (we want to leverage the notion of time when predicting), we need to specify a set of arguments that will activate Lightwood's time series pipeline:

-
-
[8]:
-
-
-
-from lightwood.api.high_level import ProblemDefinition
-
-
-
-
-
[21]:
-
-
-
-tss = {'nr_predictions': 6,   # the predictor will learn to forecast what the next semester counts will look like (6 data points at monthly intervals -> 6 months)
-       'order_by': ['Month'], # what column is used to order the entire dataset
-       'window': 12           # how many past values to consider for emitting predictions
-      }
-
-pdef = ProblemDefinition.from_dict({'target': 'Sunspots',         # specify the column to forecast
-                                    'timeseries_settings': tss    # pass along all time series specific parameters
-                                   })
-
-
-
-
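As an aside (an illustrative sketch that is not part of the original notebook; it only reuses the df loaded earlier and the settings defined above), the two key numbers can be read as follows: each forecast is conditioned on the previous 12 monthly values (window) and extends 6 months ahead (nr_predictions).

    history = df['Sunspots'].iloc[-12:]   # the 12 past values a single forecast conditions on (window)
    horizon = 6                           # number of future monthly values emitted per row (nr_predictions)
    print(len(history), horizon)          # -> 12 6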

Now, let’s do a very simple train-test split, leaving 10% of the data to check the forecasts that our predictor will produce:

-
-
[22]:
-
-
-
-cutoff = int(len(df)*0.9)
-
-train = df[:cutoff]
-test = df[cutoff:]
-
-print(train.shape, test.shape)
-
-
-
-
-
-
-
-
-(2538, 2) (282, 2)
-
-
-
-
-

Generate the predictor object

-

Now, we can generate code for a machine learning model by using our problem definition and the data:

-
-
[23]:
-
-
-
-from lightwood.api.high_level import (
-    json_ai_from_problem,
-    code_from_json_ai,
-    predictor_from_code
-)
-
-json_ai = json_ai_from_problem(df, problem_definition=pdef)
-code = code_from_json_ai(json_ai)
-predictor = predictor_from_code(code)
-
-# uncomment this to see the generated code:
-# print(code)
-
-
-
-
-
-
-
-
-INFO:lightwood-46866:Dropping features: []
-INFO:lightwood-46866:Analyzing a sample of 2467
-INFO:lightwood-46866:from a total population of 2820, this is equivalent to 87.5% of your data.
-INFO:lightwood-46866:Using 15 processes to deduct types.
-INFO:lightwood-46866:Starting statistical analysis
-INFO:lightwood-46866:Finished statistical analysis
-
-
-
-
[23]:
-
-
-
-
-<A2JDXXBL9A1E16341560437535849.Predictor at 0x15685d970>
-
-
-
-
-

Train

-

Okay, everything is ready now for our predictor to learn based on the training data we will provide.

-

Internally, lightwood cleans and reshapes the data, featurizes measurements and timestamps, and comes up with a handful of different models that will be evaluated to keep the one that produces the best forecasts.

-

Let’s train the predictor. This should take a couple of minutes, at most:

-
-
[27]:
-
-
-
-predictor.learn(train)
-
-
-
-
-
-
-
-
-INFO:lightwood-46866:Dropping features: []
-INFO:lightwood-46866:Performing statistical analysis on data
-INFO:lightwood-46866:Starting statistical analysis
-INFO:lightwood-46866:Finished statistical analysis
-INFO:lightwood-46866:Cleaning the data
-INFO:lightwood-46866:Transforming timeseries data
-INFO:lightwood-46866:Using 15 processes to reshape.
-INFO:lightwood-46866:Splitting the data into train/test
-INFO:lightwood-46866:Preparing the encoders
-INFO:lightwood-46866:Encoder prepping dict length of: 1
-INFO:lightwood-46866:Done running for: Sunspots
-INFO:lightwood-46866:time series encoder epoch [1/100000] average_loss = 0.020042672178201507
-INFO:lightwood-46866:time series encoder epoch [2/100000] average_loss = 0.0077215013273975305
-INFO:lightwood-46866:time series encoder epoch [3/100000] average_loss = 0.0064399814919421546
-INFO:lightwood-46866:time series encoder epoch [4/100000] average_loss = 0.005441865690967493
-INFO:lightwood-46866:time series encoder epoch [5/100000] average_loss = 0.005300704742732801
-INFO:lightwood-46866:time series encoder epoch [6/100000] average_loss = 0.004992981385766414
-INFO:lightwood-46866:time series encoder epoch [7/100000] average_loss = 0.00491229374157755
-INFO:lightwood-46866:time series encoder epoch [8/100000] average_loss = 0.004856080601089879
-INFO:lightwood-46866:time series encoder epoch [9/100000] average_loss = 0.004799575188703704
-INFO:lightwood-46866:time series encoder epoch [10/100000] average_loss = 0.0047617426566910325
-INFO:lightwood-46866:time series encoder epoch [11/100000] average_loss = 0.004732183615366618
-INFO:lightwood-46866:time series encoder epoch [12/100000] average_loss = 0.004704843226232026
-INFO:lightwood-46866:time series encoder epoch [13/100000] average_loss = 0.004697896095744351
-INFO:lightwood-46866:time series encoder epoch [14/100000] average_loss = 0.004687661141679998
-INFO:lightwood-46866:time series encoder epoch [15/100000] average_loss = 0.004655592012823674
-INFO:lightwood-46866:time series encoder epoch [16/100000] average_loss = 0.004595928704529478
-INFO:lightwood-46866:time series encoder epoch [17/100000] average_loss = 0.004568418233018173
-INFO:lightwood-46866:time series encoder epoch [18/100000] average_loss = 0.004558674494425456
-INFO:lightwood-46866:time series encoder epoch [19/100000] average_loss = 0.004570525518634863
-INFO:lightwood-46866:time series encoder epoch [20/100000] average_loss = 0.004572713087525284
-INFO:lightwood-46866:time series encoder epoch [21/100000] average_loss = 0.004563712864591364
-INFO:lightwood-46866:time series encoder epoch [22/100000] average_loss = 0.004498099365778136
-INFO:lightwood-46866:time series encoder epoch [23/100000] average_loss = 0.004449873953534846
-INFO:lightwood-46866:time series encoder epoch [24/100000] average_loss = 0.004484773205037703
-INFO:lightwood-46866:time series encoder epoch [25/100000] average_loss = 0.004398583738427413
-INFO:lightwood-46866:time series encoder epoch [26/100000] average_loss = 0.004340721536100957
-INFO:lightwood-46866:time series encoder epoch [27/100000] average_loss = 0.004394709227377908
-INFO:lightwood-46866:time series encoder epoch [28/100000] average_loss = 0.004414253694969311
-INFO:lightwood-46866:time series encoder epoch [29/100000] average_loss = 0.0043628366892797905
-INFO:lightwood-46866:time series encoder epoch [30/100000] average_loss = 0.0042474141246394105
-INFO:lightwood-46866:time series encoder epoch [31/100000] average_loss = 0.004357850760744329
-INFO:lightwood-46866:time series encoder epoch [32/100000] average_loss = 0.004315985190240961
-INFO:lightwood-46866:time series encoder epoch [33/100000] average_loss = 0.00410254764975163
-INFO:lightwood-46866:time series encoder epoch [34/100000] average_loss = 0.004112129096399274
-INFO:lightwood-46866:time series encoder epoch [35/100000] average_loss = 0.004205447932084401
-INFO:lightwood-46866:time series encoder epoch [36/100000] average_loss = 0.004242659451668723
-INFO:lightwood-46866:time series encoder epoch [37/100000] average_loss = 0.0042895584252842685
-INFO:lightwood-46866:time series encoder epoch [38/100000] average_loss = 0.00440603481572971
-INFO:lightwood-46866:time series encoder epoch [39/100000] average_loss = 0.004132882597153647
-INFO:lightwood-46866:time series encoder epoch [40/100000] average_loss = 0.0040611259769975094
-INFO:lightwood-46866:time series encoder epoch [41/100000] average_loss = 0.00396897013772998
-INFO:lightwood-46866:time series encoder epoch [42/100000] average_loss = 0.003915625183205856
-INFO:lightwood-46866:time series encoder epoch [43/100000] average_loss = 0.003940282500626748
-INFO:lightwood-46866:time series encoder epoch [44/100000] average_loss = 0.004178977953760247
-INFO:lightwood-46866:Featurizing the data
-INFO:lightwood-46866:Training the mixers
-/Users/Pato/Work/MindsDB/env/lib/python3.8/site-packages/lightgbm/engine.py:151: UserWarning: Found `num_iterations` in params. Will use it instead of argument
-  warnings.warn("Found `{}` in params. Will use it instead of argument".format(alias))
-WARNING:lightwood-46866:LightGBM running on CPU, this somewhat slower than the GPU version, consider using a GPU instead
-WARNING:lightwood-46866:LightGBM running on CPU, this somewhat slower than the GPU version, consider using a GPU instead
-WARNING:lightwood-46866:LightGBM running on CPU, this somewhat slower than the GPU version, consider using a GPU instead
-WARNING:lightwood-46866:LightGBM running on CPU, this somewhat slower than the GPU version, consider using a GPU instead
-WARNING:lightwood-46866:LightGBM running on CPU, this somewhat slower than the GPU version, consider using a GPU instead
-WARNING:lightwood-46866:LightGBM running on CPU, this somewhat slower than the GPU version, consider using a GPU instead
-/Users/Pato/Work/MindsDB/env/lib/python3.8/site-packages/torch/cuda/amp/grad_scaler.py:116: UserWarning: torch.cuda.amp.GradScaler is enabled, but CUDA is not available.  Disabling.
-  warnings.warn("torch.cuda.amp.GradScaler is enabled, but CUDA is not available.  Disabling.")
-/Users/Pato/Work/MindsDB/env/lib/python3.8/site-packages/pytorch_ranger/ranger.py:172: UserWarning: This overload of addcmul_ is deprecated:
-        addcmul_(Number value, Tensor tensor1, Tensor tensor2)
-Consider using one of the following signatures instead:
-        addcmul_(Tensor tensor1, Tensor tensor2, *, Number value) (Triggered internally at  ../torch/csrc/utils/python_arg_parser.cpp:1005.)
-  exp_avg_sq.mul_(beta2).addcmul_(1 - beta2, grad, grad)
-INFO:lightwood-46866:Loss of 0.539688229560852 with learning rate 0.0001
-INFO:lightwood-46866:Loss of 0.7796856760978699 with learning rate 0.00014
-INFO:lightwood-46866:Found learning rate of: 0.0001
-DEBUG:lightwood-46866:Loss @ epoch 1: 0.6908893585205078
-DEBUG:lightwood-46866:Loss @ epoch 2: 0.6882499903440475
-DEBUG:lightwood-46866:Loss @ epoch 3: 0.6850549429655075
-DEBUG:lightwood-46866:Loss @ epoch 4: 0.6813623607158661
-DEBUG:lightwood-46866:Loss @ epoch 5: 0.6772531270980835
-DEBUG:lightwood-46866:Loss @ epoch 6: 0.6728083938360214
-DEBUG:lightwood-46866:Loss @ epoch 7: 0.6652606427669525
-DEBUG:lightwood-46866:Loss @ epoch 8: 0.6601350754499435
-DEBUG:lightwood-46866:Loss @ epoch 9: 0.6548376232385635
-DEBUG:lightwood-46866:Loss @ epoch 10: 0.6494599282741547
-DEBUG:lightwood-46866:Loss @ epoch 11: 0.6441417187452316
-DEBUG:lightwood-46866:Loss @ epoch 12: 0.6389893442392349
-DEBUG:lightwood-46866:Loss @ epoch 13: 0.6309126764535904
-DEBUG:lightwood-46866:Loss @ epoch 14: 0.6257634907960892
-DEBUG:lightwood-46866:Loss @ epoch 15: 0.6205589026212692
-DEBUG:lightwood-46866:Loss @ epoch 16: 0.6152833849191666
-DEBUG:lightwood-46866:Loss @ epoch 17: 0.6099573820829391
-DEBUG:lightwood-46866:Loss @ epoch 18: 0.6046575754880905
-DEBUG:lightwood-46866:Loss @ epoch 19: 0.5962131917476654
-DEBUG:lightwood-46866:Loss @ epoch 20: 0.5909084677696228
-DEBUG:lightwood-46866:Loss @ epoch 21: 0.5856661349534988
-DEBUG:lightwood-46866:Loss @ epoch 22: 0.5805662572383881
-DEBUG:lightwood-46866:Loss @ epoch 23: 0.575617328286171
-DEBUG:lightwood-46866:Loss @ epoch 24: 0.5707968175411224
-DEBUG:lightwood-46866:Loss @ epoch 25: 0.5632813721895218
-DEBUG:lightwood-46866:Loss @ epoch 26: 0.5587586611509323
-DEBUG:lightwood-46866:Loss @ epoch 27: 0.554344117641449
-DEBUG:lightwood-46866:Loss @ epoch 28: 0.5499386340379715
-DEBUG:lightwood-46866:Loss @ epoch 29: 0.5455891937017441
-DEBUG:lightwood-46866:Loss @ epoch 30: 0.5413248538970947
-DEBUG:lightwood-46866:Loss @ epoch 31: 0.5345934927463531
-DEBUG:lightwood-46866:Loss @ epoch 32: 0.5304456949234009
-DEBUG:lightwood-46866:Loss @ epoch 33: 0.526373103260994
-DEBUG:lightwood-46866:Loss @ epoch 34: 0.5223924517631531
-DEBUG:lightwood-46866:Loss @ epoch 35: 0.5184392035007477
-DEBUG:lightwood-46866:Loss @ epoch 36: 0.5145991444587708
-DEBUG:lightwood-46866:Loss @ epoch 37: 0.5086493343114853
-DEBUG:lightwood-46866:Loss @ epoch 38: 0.5050476491451263
-DEBUG:lightwood-46866:Loss @ epoch 39: 0.5015637576580048
-DEBUG:lightwood-46866:Loss @ epoch 40: 0.49815742671489716
-DEBUG:lightwood-46866:Loss @ epoch 41: 0.4948585033416748
-DEBUG:lightwood-46866:Loss @ epoch 42: 0.49173182249069214
-DEBUG:lightwood-46866:Loss @ epoch 43: 0.48690974712371826
-DEBUG:lightwood-46866:Loss @ epoch 44: 0.4839773178100586
-DEBUG:lightwood-46866:Loss @ epoch 45: 0.4811210632324219
-DEBUG:lightwood-46866:Loss @ epoch 46: 0.4783552885055542
-DEBUG:lightwood-46866:Loss @ epoch 47: 0.4757150560617447
-DEBUG:lightwood-46866:Loss @ epoch 48: 0.47318898141384125
-DEBUG:lightwood-46866:Loss @ epoch 49: 0.46942955255508423
-DEBUG:lightwood-46866:Loss @ epoch 50: 0.4671967923641205
-DEBUG:lightwood-46866:Loss @ epoch 51: 0.4650762975215912
-DEBUG:lightwood-46866:Loss @ epoch 52: 0.4630257934331894
-DEBUG:lightwood-46866:Loss @ epoch 53: 0.46110378205776215
-DEBUG:lightwood-46866:Loss @ epoch 54: 0.45930930972099304
-DEBUG:lightwood-46866:Loss @ epoch 55: 0.45666399598121643
-DEBUG:lightwood-46866:Loss @ epoch 56: 0.4550795406103134
-DEBUG:lightwood-46866:Loss @ epoch 57: 0.4535674601793289
-DEBUG:lightwood-46866:Loss @ epoch 58: 0.45216208696365356
-DEBUG:lightwood-46866:Loss @ epoch 59: 0.45088090002536774
-DEBUG:lightwood-46866:Loss @ epoch 60: 0.4496418982744217
-DEBUG:lightwood-46866:Loss @ epoch 61: 0.4477883279323578
-DEBUG:lightwood-46866:Loss @ epoch 62: 0.4467353969812393
-DEBUG:lightwood-46866:Loss @ epoch 63: 0.4457828402519226
-DEBUG:lightwood-46866:Loss @ epoch 64: 0.4448719322681427
-DEBUG:lightwood-46866:Loss @ epoch 65: 0.44403648376464844
-DEBUG:lightwood-46866:Loss @ epoch 66: 0.44328153133392334
-DEBUG:lightwood-46866:Loss @ epoch 67: 0.44207488000392914
-DEBUG:lightwood-46866:Loss @ epoch 68: 0.4413738548755646
-DEBUG:lightwood-46866:Loss @ epoch 69: 0.44084450602531433
-DEBUG:lightwood-46866:Loss @ epoch 70: 0.4403578191995621
-DEBUG:lightwood-46866:Loss @ epoch 71: 0.4398685395717621
-DEBUG:lightwood-46866:Loss @ epoch 72: 0.43935835361480713
-DEBUG:lightwood-46866:Loss @ epoch 73: 0.43840254843235016
-DEBUG:lightwood-46866:Loss @ epoch 74: 0.4378361850976944
-DEBUG:lightwood-46866:Loss @ epoch 75: 0.4375789165496826
-DEBUG:lightwood-46866:Loss @ epoch 76: 0.43739429116249084
-DEBUG:lightwood-46866:Loss @ epoch 77: 0.4372607320547104
-DEBUG:lightwood-46866:Loss @ epoch 78: 0.43708017468452454
-DEBUG:lightwood-46866:Loss @ epoch 79: 0.4364318400621414
-DEBUG:lightwood-46866:Loss @ epoch 80: 0.43584632873535156
-DEBUG:lightwood-46866:Loss @ epoch 81: 0.4356466382741928
-DEBUG:lightwood-46866:Loss @ epoch 82: 0.4355204701423645
-DEBUG:lightwood-46866:Loss @ epoch 83: 0.43557313084602356
-DEBUG:lightwood-46866:Loss @ epoch 84: 0.43554021418094635
-DEBUG:lightwood-46866:Loss @ epoch 85: 0.43514105677604675
-DEBUG:lightwood-46866:Loss @ epoch 86: 0.43462760746479034
-DEBUG:lightwood-46866:Loss @ epoch 87: 0.43442972004413605
-DEBUG:lightwood-46866:Loss @ epoch 88: 0.43443459272384644
-DEBUG:lightwood-46866:Loss @ epoch 89: 0.4344787895679474
-DEBUG:lightwood-46866:Loss @ epoch 90: 0.4345344454050064
-DEBUG:lightwood-46866:Loss @ epoch 1: 0.329136921600862
-DEBUG:lightwood-46866:Loss @ epoch 2: 0.3284675722772425
-DEBUG:lightwood-46866:Loss @ epoch 3: 0.33007449995387683
-DEBUG:lightwood-46866:Loss @ epoch 4: 0.32765168764374475
-DEBUG:lightwood-46866:Loss @ epoch 5: 0.3260806582190774
-DEBUG:lightwood-46866:Loss @ epoch 6: 0.3272357068278573
-DEBUG:lightwood-46866:Loss @ epoch 7: 0.3281749730760401
-INFO:lightwood-46866:Started fitting LGBM models for array prediction
-INFO:lightwood-46866:Started fitting LGBM model
-/Users/Pato/Work/MindsDB/env/lib/python3.8/site-packages/lightgbm/engine.py:151: UserWarning: Found `num_iterations` in params. Will use it instead of argument
-  warnings.warn("Found `{}` in params. Will use it instead of argument".format(alias))
-INFO:lightwood-46866:A single GBM iteration takes 0.1 seconds
-INFO:lightwood-46866:Training GBM (<module 'lightgbm' from '/Users/Pato/Work/MindsDB/env/lib/python3.8/site-packages/lightgbm/__init__.py'>) with 1325 iterations given 165.66666666666666 seconds constraint
-/Users/Pato/Work/MindsDB/env/lib/python3.8/site-packages/lightgbm/engine.py:156: UserWarning: Found `early_stopping_rounds` in params. Will use it instead of argument
-  warnings.warn("Found `{}` in params. Will use it instead of argument".format(alias))
-INFO:lightwood-46866:Lightgbm model contains 1 weak estimators
-INFO:lightwood-46866:Updating lightgbm model with 1 iterations
-INFO:lightwood-46866:Model now has a total of 2 weak estimators
-INFO:lightwood-46866:Started fitting LGBM model
-INFO:lightwood-46866:A single GBM iteration takes 0.1 seconds
-INFO:lightwood-46866:Training GBM (<module 'lightgbm' from '/Users/Pato/Work/MindsDB/env/lib/python3.8/site-packages/lightgbm/__init__.py'>) with 1325 iterations given 165.66666666666666 seconds constraint
-INFO:lightwood-46866:Lightgbm model contains 1 weak estimators
-INFO:lightwood-46866:Updating lightgbm model with 1 iterations
-INFO:lightwood-46866:Model now has a total of 2 weak estimators
-INFO:lightwood-46866:Started fitting LGBM model
-INFO:lightwood-46866:A single GBM iteration takes 0.1 seconds
-INFO:lightwood-46866:Training GBM (<module 'lightgbm' from '/Users/Pato/Work/MindsDB/env/lib/python3.8/site-packages/lightgbm/__init__.py'>) with 1325 iterations given 165.66666666666666 seconds constraint
-INFO:lightwood-46866:Lightgbm model contains 1 weak estimators
-INFO:lightwood-46866:Updating lightgbm model with 1 iterations
-INFO:lightwood-46866:Model now has a total of 2 weak estimators
-INFO:lightwood-46866:Started fitting LGBM model
-INFO:lightwood-46866:A single GBM iteration takes 0.1 seconds
-INFO:lightwood-46866:Training GBM (<module 'lightgbm' from '/Users/Pato/Work/MindsDB/env/lib/python3.8/site-packages/lightgbm/__init__.py'>) with 1325 iterations given 165.66666666666666 seconds constraint
-INFO:lightwood-46866:Lightgbm model contains 1 weak estimators
-INFO:lightwood-46866:Updating lightgbm model with 1 iterations
-INFO:lightwood-46866:Model now has a total of 2 weak estimators
-INFO:lightwood-46866:Started fitting LGBM model
-INFO:lightwood-46866:A single GBM iteration takes 0.1 seconds
-INFO:lightwood-46866:Training GBM (<module 'lightgbm' from '/Users/Pato/Work/MindsDB/env/lib/python3.8/site-packages/lightgbm/__init__.py'>) with 1325 iterations given 165.66666666666666 seconds constraint
-INFO:lightwood-46866:Lightgbm model contains 1 weak estimators
-INFO:lightwood-46866:Updating lightgbm model with 1 iterations
-INFO:lightwood-46866:Model now has a total of 2 weak estimators
-INFO:lightwood-46866:Started fitting LGBM model
-INFO:lightwood-46866:A single GBM iteration takes 0.1 seconds
-INFO:lightwood-46866:Training GBM (<module 'lightgbm' from '/Users/Pato/Work/MindsDB/env/lib/python3.8/site-packages/lightgbm/__init__.py'>) with 1325 iterations given 165.66666666666666 seconds constraint
-INFO:lightwood-46866:Lightgbm model contains 1 weak estimators
-INFO:lightwood-46866:Updating lightgbm model with 1 iterations
-INFO:lightwood-46866:Model now has a total of 2 weak estimators
-INFO:lightwood-46866:Ensembling the mixer
-INFO:lightwood-46866:Mixer: Neural got accuracy: 0.19612012470445245
-INFO:lightwood-46866:Mixer: LightGBMArray got accuracy: 0.21013741093675975
-INFO:lightwood-46866:Picked best mixer: LightGBMArray
-INFO:lightwood-46866:Analyzing the ensemble of mixers
-INFO:lightwood-46866:Adjustment on validation requested.
-INFO:lightwood-46866:Updating the mixers
-/Users/Pato/Work/MindsDB/env/lib/python3.8/site-packages/torch/cuda/amp/grad_scaler.py:116: UserWarning: torch.cuda.amp.GradScaler is enabled, but CUDA is not available.  Disabling.
-  warnings.warn("torch.cuda.amp.GradScaler is enabled, but CUDA is not available.  Disabling.")
-DEBUG:lightwood-46866:Loss @ epoch 1: 0.33339183280865353
-DEBUG:lightwood-46866:Loss @ epoch 2: 0.3303144524494807
-DEBUG:lightwood-46866:Loss @ epoch 3: 0.330986554423968
-DEBUG:lightwood-46866:Loss @ epoch 4: 0.3315189927816391
-DEBUG:lightwood-46866:Loss @ epoch 5: 0.33072087665398914
-DEBUG:lightwood-46866:Loss @ epoch 6: 0.33309372514486313
-INFO:lightwood-46866:Updating array of LGBM models...
-INFO:lightwood-46866:Updating lightgbm model with 1 iterations
-/Users/Pato/Work/MindsDB/env/lib/python3.8/site-packages/lightgbm/engine.py:151: UserWarning: Found `num_iterations` in params. Will use it instead of argument
-  warnings.warn("Found `{}` in params. Will use it instead of argument".format(alias))
-/Users/Pato/Work/MindsDB/env/lib/python3.8/site-packages/lightgbm/engine.py:156: UserWarning: Found `early_stopping_rounds` in params. Will use it instead of argument
-  warnings.warn("Found `{}` in params. Will use it instead of argument".format(alias))
-INFO:lightwood-46866:Model now has a total of 3 weak estimators
-INFO:lightwood-46866:Updating lightgbm model with 1 iterations
-INFO:lightwood-46866:Model now has a total of 3 weak estimators
-INFO:lightwood-46866:Updating lightgbm model with 1 iterations
-INFO:lightwood-46866:Model now has a total of 3 weak estimators
-INFO:lightwood-46866:Updating lightgbm model with 1 iterations
-INFO:lightwood-46866:Model now has a total of 3 weak estimators
-INFO:lightwood-46866:Updating lightgbm model with 1 iterations
-INFO:lightwood-46866:Model now has a total of 3 weak estimators
-INFO:lightwood-46866:Updating lightgbm model with 1 iterations
-INFO:lightwood-46866:Model now has a total of 3 weak estimators
-
-
-
-
-

Predict

-

Once the predictor has trained, we can use it to generate 6-month forecasts for each of the test set data points:

-
-
[28]:
-
-
-
-forecasts = predictor.predict(test)
-
-
-
-
-
-
-
-
-INFO:lightwood-46866:Dropping features: []
-INFO:lightwood-46866:Cleaning the data
-INFO:lightwood-46866:Transforming timeseries data
-INFO:lightwood-46866:Featurizing the data
-/Users/Pato/Work/MindsDB/env/lib/python3.8/site-packages/pandas/core/indexing.py:1637: SettingWithCopyWarning:
-A value is trying to be set on a copy of a slice from a DataFrame
-
-See the caveats in the documentation: https://pandas.pydata.org/pandas-docs/stable/user_guide/indexing.html#returning-a-view-versus-a-copy
-  self._setitem_single_block(indexer, value, name)
-INFO:lightwood-46866:AccStats.explain() has not been implemented, no modifications will be done to the data insights.
-INFO:lightwood-46866:GlobalFeatureImportance.explain() has not been implemented, no modifications will be done to the data insights.
-
-
-

Let’s check how a single row might look:

-
-
[37]:
-
-
-
-forecasts.iloc[[10]]
-
-
-
-
-
[37]:
-
-
-
-
- - - - - - - - - - - - - - - - - - - - - - - - - - -
row 10:
    prediction:    [51.28799878891615, 46.76867159945164, 52.0899...
    truth:         51.0
    order_Month:   [-272577600.0, -269899200.0, -267220800.0, -26...
    confidence:    [0.24, 0.24, 0.24, 0.24, 0.24, 0.24]
    lower:         [30.80746268275371, 26.288135493289204, 31.609...
    upper:         [71.76853489507859, 67.24920770561408, 72.5704...
    anomaly:       False
-
-
-

You’ll note that the point prediction has associated lower and upper bounds that are a function of the estimated confidence the model has on its own output. Apart from this, order_Month yields the timestamps of each prediction, truth lets us know what is the one-step-ahead observed value (if it exists at all). Finally, the anomaly tag will let you know if the observed value falls outside of the predicted region.

-
-
-
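To make these fields concrete, here is a minimal sketch of how the returned DataFrame could be inspected. This is not part of the original notebook; it only assumes the column names visible in the row above, with each prediction/lower/upper cell holding a 6-step list.

    n_anomalies = forecasts['anomaly'].sum()                   # observed values outside the predicted region
    print(f"{n_anomalies} anomalous points out of {len(forecasts)}")

    one_ahead = forecasts['prediction'].apply(lambda a: a[0])  # 1-month-ahead point forecast per row
    low = forecasts['lower'].apply(lambda a: a[0])             # matching lower confidence bound
    high = forecasts['upper'].apply(lambda a: a[0])            # matching upper confidence bound
    print(one_ahead.head())
    print(low.head())
    print(high.head())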

Visualizing a forecast

-

Okay, time series are much easier to appreciate through plots. Let’s make one:

-

NOTE: We will use matplotlib to generate a simple plot of these forecasts. If you want to run this notebook locally, you will need to pip install matplotlib for the following code to work.

-
-
[38]:
-
-
-
-import matplotlib.pyplot as plt
-
-
-
-
-
[69]:
-
-
-
-plt.figure(figsize=(12, 8))
-plt.plot(forecasts['truth'].iloc[-24:], color='green', label='observed series')
-plt.plot([None for _ in range(forecasts.shape[0])] + forecasts.iloc[-1]['prediction'], color='purple', label='point prediction')
-plt.plot([None for _ in range(forecasts.shape[0])] + forecasts.iloc[-1]['lower'], color='grey')
-plt.plot([None for _ in range(forecasts.shape[0])] + forecasts.iloc[-1]['upper'], color='grey')
-plt.xlabel('timestep')
-plt.ylabel('# sunspots')
-plt.title("Forecasted amount of sunspots for the next semester")
-plt.legend()
-plt.show()
-
-
-
-
-
-
-
-../../_images/tutorials_tutorial_time_series_Tutorial_-_Training_a_time_series_predictor_17_0.png -
-
-
-
-
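Before wrapping up, a rough quality check could be run on these forecasts. The following is a hedged sketch (not part of the original notebook): it compares the first forecasted step of each row against the truth column described earlier, skipping rows where no observed value exists.

    import numpy as np

    point = forecasts['prediction'].apply(lambda a: a[0])  # 1-step-ahead point forecasts
    mask = forecasts['truth'].notna()                      # rows that have an observed value
    mae = np.mean(np.abs(forecasts.loc[mask, 'truth'] - point[mask]))
    print(f"1-step-ahead MAE: {mae:.2f} sunspots")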

Conclusion

-

In this tutorial, we have gone through how you can train a machine learning model with Lightwood to produce forecasts for a univariate time series task.

-

There are additional parameters to further customize your timeseries settings and/or prediction insights, so be sure to check the rest of the documentation.
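For instance, settings beyond the ones used in this tutorial are passed through the same timeseries_settings dictionary; a rough sketch follows (the extra keys group_by and use_previous_target are assumptions about this Lightwood version rather than something covered here, so check the TimeseriesSettings documentation for the exact names and defaults):

from lightwood.api.high_level import ProblemDefinition

tss = {
    'nr_predictions': 6,          # forecast horizon, as used above
    'order_by': ['Month'],        # column that orders the series
    'window': 12,                 # how many past values each forecast may look at
    'group_by': [],               # assumed key: columns that split the data into separate series
    'use_previous_target': True,  # assumed key: feed past target values as an input feature
}

pdef = ProblemDefinition.from_dict({'target': 'Sunspots', 'timeseries_settings': tss})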

-
-
- © Copyright 2017-2021, MindsDB. Built with Sphinx using a theme provided by Read the Docs.
- - - - - - - - - - - \ No newline at end of file diff --git a/docs/tutorials/tutorial_time_series/Tutorial - Training a time series predictor.ipynb b/docs/tutorials/tutorial_time_series/Tutorial - Training a time series predictor.ipynb deleted file mode 100644 index 17eb5d49c..000000000 --- a/docs/tutorials/tutorial_time_series/Tutorial - Training a time series predictor.ipynb +++ /dev/null @@ -1,751 +0,0 @@ -{ - "cells": [ - { - "cell_type": "markdown", - "metadata": {}, - "source": [ - "# Tutorial - Time series forecasting\n", - "\n", - "## Introduction\n", - "\n", - "Time series are an ubiquitous type of data in all types of processes. Producing forecasts for them can be highly valuable in domains like retail or industrial manufacture, among many others.\n", - "\n", - "Lightwood supports time series forecasting (both univariate and multivariate inputs), handling many of the pain points commonly associated with setting up a manual time series predictive pipeline. \n", - "\n", - "In this tutorial, we will train a lightwood predictor and analyze its forecasts for the task of counting sunspots in monthly intervals.\n", - "\n", - "## Load data\n", - "\n", - "Let's begin by loading the dataset and looking at it:" - ] - }, - { - "cell_type": "code", - "execution_count": 5, - "metadata": {}, - "outputs": [ - { - "data": { - "text/html": [ - "
\n", - "\n", - "\n", - " \n", - " \n", - " \n", - " \n", - " \n", - " \n", - " \n", - " \n", - " \n", - " \n", - " \n", - " \n", - " \n", - " \n", - " \n", - " \n", - " \n", - " \n", - " \n", - " \n", - " \n", - " \n", - " \n", - " \n", - " \n", - " \n", - " \n", - " \n", - " \n", - " \n", - " \n", - " \n", - " \n", - " \n", - " \n", - " \n", - " \n", - " \n", - " \n", - " \n", - " \n", - " \n", - " \n", - " \n", - " \n", - " \n", - " \n", - " \n", - " \n", - " \n", - " \n", - " \n", - " \n", - " \n", - " \n", - " \n", - " \n", - " \n", - " \n", - " \n", - " \n", - " \n", - " \n", - " \n", - "
MonthSunspots
01749-0158.0
11749-0262.6
21749-0370.0
31749-0455.7
41749-0585.0
.........
28151983-0871.8
28161983-0950.3
28171983-1055.8
28181983-1133.3
28191983-1233.4
\n", - "

2820 rows × 2 columns

\n", - "
" - ], - "text/plain": [ - " Month Sunspots\n", - "0 1749-01 58.0\n", - "1 1749-02 62.6\n", - "2 1749-03 70.0\n", - "3 1749-04 55.7\n", - "4 1749-05 85.0\n", - "... ... ...\n", - "2815 1983-08 71.8\n", - "2816 1983-09 50.3\n", - "2817 1983-10 55.8\n", - "2818 1983-11 33.3\n", - "2819 1983-12 33.4\n", - "\n", - "[2820 rows x 2 columns]" - ] - }, - "execution_count": 5, - "metadata": {}, - "output_type": "execute_result" - } - ], - "source": [ - "import pandas as pd\n", - "\n", - "df = pd.read_csv(\"https://raw.githubusercontent.com/mindsdb/benchmarks/main/benchmarks/datasets/monthly_sunspots/data.csv\")\n", - "df" - ] - }, - { - "cell_type": "markdown", - "metadata": {}, - "source": [ - "This is a very simple dataset. It's got a single column that specifies the month in which the measurement was done, and then in the 'Sunspots' column we have the actual quantity we are interested in forecasting. As such, we can characterize this as a univariate time series problem.\n", - "\n", - "## Define the predictive task\n", - "\n", - "We will use Lightwood high level methods to state what we want to predict. As this is a time series task (because we want to leverage the notion of time to predict), we need to specify a set of arguments that will activate Lightwood's time series pipeline:" - ] - }, - { - "cell_type": "code", - "execution_count": 8, - "metadata": {}, - "outputs": [], - "source": [ - "from lightwood.api.high_level import ProblemDefinition" - ] - }, - { - "cell_type": "code", - "execution_count": 21, - "metadata": {}, - "outputs": [], - "source": [ - "tss = {'nr_predictions': 6, # the predictor will learn to forecast what the next semester counts will look like (6 data points at monthly intervals -> 6 months)\n", - " 'order_by': ['Month'], # what column is used to order the entire datset\n", - " 'window': 12 # how many past values to consider for emitting predictions\n", - " }\n", - "\n", - "pdef = ProblemDefinition.from_dict({'target': 'Sunspots', # specify the column to forecast\n", - " 'timeseries_settings': tss # pass along all time series specific parameters\n", - " })" - ] - }, - { - "cell_type": "markdown", - "metadata": {}, - "source": [ - "Now, let's do a very simple train-test split, leaving 10% of the data to check the forecasts that our predictor will produce:" - ] - }, - { - "cell_type": "code", - "execution_count": 22, - "metadata": {}, - "outputs": [ - { - "name": "stdout", - "output_type": "stream", - "text": [ - "(2538, 2) (282, 2)\n" - ] - } - ], - "source": [ - "cutoff = int(len(df)*0.9)\n", - "\n", - "train = df[:cutoff]\n", - "test = df[cutoff:]\n", - "\n", - "print(train.shape, test.shape)" - ] - }, - { - "cell_type": "markdown", - "metadata": {}, - "source": [ - "## Generate the predictor object\n", - "\n", - "Now, we can generate code for a machine learning model by using our problem definition and the data:" - ] - }, - { - "cell_type": "code", - "execution_count": 23, - "metadata": {}, - "outputs": [ - { - "name": "stderr", - "output_type": "stream", - "text": [ - "\u001b[32mINFO:lightwood-46866:Dropping features: []\u001b[0m\n", - "\u001b[32mINFO:lightwood-46866:Analyzing a sample of 2467\u001b[0m\n", - "\u001b[32mINFO:lightwood-46866:from a total population of 2820, this is equivalent to 87.5% of your data.\u001b[0m\n", - "\u001b[32mINFO:lightwood-46866:Using 15 processes to deduct types.\u001b[0m\n", - "\u001b[32mINFO:lightwood-46866:Starting statistical analysis\u001b[0m\n", - "\u001b[32mINFO:lightwood-46866:Finished statistical analysis\u001b[0m\n" - ] - }, - 
{ - "data": { - "text/plain": [ - "" - ] - }, - "execution_count": 23, - "metadata": {}, - "output_type": "execute_result" - } - ], - "source": [ - "from lightwood.api.high_level import (\n", - " json_ai_from_problem,\n", - " code_from_json_ai,\n", - " predictor_from_code\n", - ")\n", - "\n", - "json_ai = json_ai_from_problem(df, problem_definition=pdef)\n", - "code = code_from_json_ai(json_ai)\n", - "predictor = predictor_from_code(code)\n", - "\n", - "# uncomment this to see the generated code:\n", - "# print(code)" - ] - }, - { - "cell_type": "markdown", - "metadata": {}, - "source": [ - "## Train\n", - "\n", - "Okay, everything is ready now for our predictor to learn based on the training data we will provide.\n", - "\n", - "Internally, lightwood cleans and reshapes the data, featurizes measurements and timestamps, and comes up with a handful of different models that will be evaluated to keep the one that produces the best forecasts.\n", - "\n", - "Let's train the predictor. This should take a couple of minutes, at most:" - ] - }, - { - "cell_type": "code", - "execution_count": 27, - "metadata": {}, - "outputs": [ - { - "name": "stderr", - "output_type": "stream", - "text": [ - "\u001b[32mINFO:lightwood-46866:Dropping features: []\u001b[0m\n", - "\u001b[32mINFO:lightwood-46866:Performing statistical analysis on data\u001b[0m\n", - "\u001b[32mINFO:lightwood-46866:Starting statistical analysis\u001b[0m\n", - "\u001b[32mINFO:lightwood-46866:Finished statistical analysis\u001b[0m\n", - "\u001b[32mINFO:lightwood-46866:Cleaning the data\u001b[0m\n", - "\u001b[32mINFO:lightwood-46866:Transforming timeseries data\u001b[0m\n", - "\u001b[32mINFO:lightwood-46866:Using 15 processes to reshape.\u001b[0m\n", - "\u001b[32mINFO:lightwood-46866:Splitting the data into train/test\u001b[0m\n", - "\u001b[32mINFO:lightwood-46866:Preparing the encoders\u001b[0m\n", - "\u001b[32mINFO:lightwood-46866:Encoder prepping dict length of: 1\u001b[0m\n", - "\u001b[32mINFO:lightwood-46866:Done running for: Sunspots\u001b[0m\n", - "\u001b[32mINFO:lightwood-46866:time series encoder epoch [1/100000] average_loss = 0.020042672178201507\u001b[0m\n", - "\u001b[32mINFO:lightwood-46866:time series encoder epoch [2/100000] average_loss = 0.0077215013273975305\u001b[0m\n", - "\u001b[32mINFO:lightwood-46866:time series encoder epoch [3/100000] average_loss = 0.0064399814919421546\u001b[0m\n", - "\u001b[32mINFO:lightwood-46866:time series encoder epoch [4/100000] average_loss = 0.005441865690967493\u001b[0m\n", - "\u001b[32mINFO:lightwood-46866:time series encoder epoch [5/100000] average_loss = 0.005300704742732801\u001b[0m\n", - "\u001b[32mINFO:lightwood-46866:time series encoder epoch [6/100000] average_loss = 0.004992981385766414\u001b[0m\n", - "\u001b[32mINFO:lightwood-46866:time series encoder epoch [7/100000] average_loss = 0.00491229374157755\u001b[0m\n", - "\u001b[32mINFO:lightwood-46866:time series encoder epoch [8/100000] average_loss = 0.004856080601089879\u001b[0m\n", - "\u001b[32mINFO:lightwood-46866:time series encoder epoch [9/100000] average_loss = 0.004799575188703704\u001b[0m\n", - "\u001b[32mINFO:lightwood-46866:time series encoder epoch [10/100000] average_loss = 0.0047617426566910325\u001b[0m\n", - "\u001b[32mINFO:lightwood-46866:time series encoder epoch [11/100000] average_loss = 0.004732183615366618\u001b[0m\n", - "\u001b[32mINFO:lightwood-46866:time series encoder epoch [12/100000] average_loss = 0.004704843226232026\u001b[0m\n", - "\u001b[32mINFO:lightwood-46866:time series encoder epoch [13/100000] 
average_loss = 0.004697896095744351\u001b[0m\n", - "\u001b[32mINFO:lightwood-46866:time series encoder epoch [14/100000] average_loss = 0.004687661141679998\u001b[0m\n", - "\u001b[32mINFO:lightwood-46866:time series encoder epoch [15/100000] average_loss = 0.004655592012823674\u001b[0m\n", - "\u001b[32mINFO:lightwood-46866:time series encoder epoch [16/100000] average_loss = 0.004595928704529478\u001b[0m\n", - "\u001b[32mINFO:lightwood-46866:time series encoder epoch [17/100000] average_loss = 0.004568418233018173\u001b[0m\n", - "\u001b[32mINFO:lightwood-46866:time series encoder epoch [18/100000] average_loss = 0.004558674494425456\u001b[0m\n", - "\u001b[32mINFO:lightwood-46866:time series encoder epoch [19/100000] average_loss = 0.004570525518634863\u001b[0m\n", - "\u001b[32mINFO:lightwood-46866:time series encoder epoch [20/100000] average_loss = 0.004572713087525284\u001b[0m\n", - "\u001b[32mINFO:lightwood-46866:time series encoder epoch [21/100000] average_loss = 0.004563712864591364\u001b[0m\n", - "\u001b[32mINFO:lightwood-46866:time series encoder epoch [22/100000] average_loss = 0.004498099365778136\u001b[0m\n", - "\u001b[32mINFO:lightwood-46866:time series encoder epoch [23/100000] average_loss = 0.004449873953534846\u001b[0m\n", - "\u001b[32mINFO:lightwood-46866:time series encoder epoch [24/100000] average_loss = 0.004484773205037703\u001b[0m\n", - "\u001b[32mINFO:lightwood-46866:time series encoder epoch [25/100000] average_loss = 0.004398583738427413\u001b[0m\n", - "\u001b[32mINFO:lightwood-46866:time series encoder epoch [26/100000] average_loss = 0.004340721536100957\u001b[0m\n", - "\u001b[32mINFO:lightwood-46866:time series encoder epoch [27/100000] average_loss = 0.004394709227377908\u001b[0m\n", - "\u001b[32mINFO:lightwood-46866:time series encoder epoch [28/100000] average_loss = 0.004414253694969311\u001b[0m\n", - "\u001b[32mINFO:lightwood-46866:time series encoder epoch [29/100000] average_loss = 0.0043628366892797905\u001b[0m\n", - "\u001b[32mINFO:lightwood-46866:time series encoder epoch [30/100000] average_loss = 0.0042474141246394105\u001b[0m\n", - "\u001b[32mINFO:lightwood-46866:time series encoder epoch [31/100000] average_loss = 0.004357850760744329\u001b[0m\n", - "\u001b[32mINFO:lightwood-46866:time series encoder epoch [32/100000] average_loss = 0.004315985190240961\u001b[0m\n", - "\u001b[32mINFO:lightwood-46866:time series encoder epoch [33/100000] average_loss = 0.00410254764975163\u001b[0m\n", - "\u001b[32mINFO:lightwood-46866:time series encoder epoch [34/100000] average_loss = 0.004112129096399274\u001b[0m\n", - "\u001b[32mINFO:lightwood-46866:time series encoder epoch [35/100000] average_loss = 0.004205447932084401\u001b[0m\n", - "\u001b[32mINFO:lightwood-46866:time series encoder epoch [36/100000] average_loss = 0.004242659451668723\u001b[0m\n", - "\u001b[32mINFO:lightwood-46866:time series encoder epoch [37/100000] average_loss = 0.0042895584252842685\u001b[0m\n", - "\u001b[32mINFO:lightwood-46866:time series encoder epoch [38/100000] average_loss = 0.00440603481572971\u001b[0m\n", - "\u001b[32mINFO:lightwood-46866:time series encoder epoch [39/100000] average_loss = 0.004132882597153647\u001b[0m\n", - "\u001b[32mINFO:lightwood-46866:time series encoder epoch [40/100000] average_loss = 0.0040611259769975094\u001b[0m\n", - "\u001b[32mINFO:lightwood-46866:time series encoder epoch [41/100000] average_loss = 0.00396897013772998\u001b[0m\n", - "\u001b[32mINFO:lightwood-46866:time series encoder epoch [42/100000] average_loss = 
0.003915625183205856\u001b[0m\n", - "\u001b[32mINFO:lightwood-46866:time series encoder epoch [43/100000] average_loss = 0.003940282500626748\u001b[0m\n", - "\u001b[32mINFO:lightwood-46866:time series encoder epoch [44/100000] average_loss = 0.004178977953760247\u001b[0m\n", - "\u001b[32mINFO:lightwood-46866:Featurizing the data\u001b[0m\n", - "\u001b[32mINFO:lightwood-46866:Training the mixers\u001b[0m\n", - "/Users/Pato/Work/MindsDB/env/lib/python3.8/site-packages/lightgbm/engine.py:151: UserWarning: Found `num_iterations` in params. Will use it instead of argument\n", - " warnings.warn(\"Found `{}` in params. Will use it instead of argument\".format(alias))\n", - "\u001b[33mWARNING:lightwood-46866:LightGBM running on CPU, this somewhat slower than the GPU version, consider using a GPU instead\u001b[0m\n", - "\u001b[33mWARNING:lightwood-46866:LightGBM running on CPU, this somewhat slower than the GPU version, consider using a GPU instead\u001b[0m\n", - "\u001b[33mWARNING:lightwood-46866:LightGBM running on CPU, this somewhat slower than the GPU version, consider using a GPU instead\u001b[0m\n", - "\u001b[33mWARNING:lightwood-46866:LightGBM running on CPU, this somewhat slower than the GPU version, consider using a GPU instead\u001b[0m\n", - "\u001b[33mWARNING:lightwood-46866:LightGBM running on CPU, this somewhat slower than the GPU version, consider using a GPU instead\u001b[0m\n", - "\u001b[33mWARNING:lightwood-46866:LightGBM running on CPU, this somewhat slower than the GPU version, consider using a GPU instead\u001b[0m\n", - "/Users/Pato/Work/MindsDB/env/lib/python3.8/site-packages/torch/cuda/amp/grad_scaler.py:116: UserWarning: torch.cuda.amp.GradScaler is enabled, but CUDA is not available. Disabling.\n", - " warnings.warn(\"torch.cuda.amp.GradScaler is enabled, but CUDA is not available. 
Disabling.\")\n", - "/Users/Pato/Work/MindsDB/env/lib/python3.8/site-packages/pytorch_ranger/ranger.py:172: UserWarning: This overload of addcmul_ is deprecated:\n", - "\taddcmul_(Number value, Tensor tensor1, Tensor tensor2)\n", - "Consider using one of the following signatures instead:\n", - "\taddcmul_(Tensor tensor1, Tensor tensor2, *, Number value) (Triggered internally at ../torch/csrc/utils/python_arg_parser.cpp:1005.)\n", - " exp_avg_sq.mul_(beta2).addcmul_(1 - beta2, grad, grad)\n", - "\u001b[32mINFO:lightwood-46866:Loss of 0.539688229560852 with learning rate 0.0001\u001b[0m\n", - "\u001b[32mINFO:lightwood-46866:Loss of 0.7796856760978699 with learning rate 0.00014\u001b[0m\n", - "\u001b[32mINFO:lightwood-46866:Found learning rate of: 0.0001\u001b[0m\n", - "\u001b[37mDEBUG:lightwood-46866:Loss @ epoch 1: 0.6908893585205078\u001b[0m\n", - "\u001b[37mDEBUG:lightwood-46866:Loss @ epoch 2: 0.6882499903440475\u001b[0m\n", - "\u001b[37mDEBUG:lightwood-46866:Loss @ epoch 3: 0.6850549429655075\u001b[0m\n", - "\u001b[37mDEBUG:lightwood-46866:Loss @ epoch 4: 0.6813623607158661\u001b[0m\n", - "\u001b[37mDEBUG:lightwood-46866:Loss @ epoch 5: 0.6772531270980835\u001b[0m\n", - "\u001b[37mDEBUG:lightwood-46866:Loss @ epoch 6: 0.6728083938360214\u001b[0m\n", - "\u001b[37mDEBUG:lightwood-46866:Loss @ epoch 7: 0.6652606427669525\u001b[0m\n", - "\u001b[37mDEBUG:lightwood-46866:Loss @ epoch 8: 0.6601350754499435\u001b[0m\n", - "\u001b[37mDEBUG:lightwood-46866:Loss @ epoch 9: 0.6548376232385635\u001b[0m\n", - "\u001b[37mDEBUG:lightwood-46866:Loss @ epoch 10: 0.6494599282741547\u001b[0m\n", - "\u001b[37mDEBUG:lightwood-46866:Loss @ epoch 11: 0.6441417187452316\u001b[0m\n", - "\u001b[37mDEBUG:lightwood-46866:Loss @ epoch 12: 0.6389893442392349\u001b[0m\n", - "\u001b[37mDEBUG:lightwood-46866:Loss @ epoch 13: 0.6309126764535904\u001b[0m\n", - "\u001b[37mDEBUG:lightwood-46866:Loss @ epoch 14: 0.6257634907960892\u001b[0m\n" - ] - }, - { - "name": "stderr", - "output_type": "stream", - "text": [ - "\u001b[37mDEBUG:lightwood-46866:Loss @ epoch 15: 0.6205589026212692\u001b[0m\n", - "\u001b[37mDEBUG:lightwood-46866:Loss @ epoch 16: 0.6152833849191666\u001b[0m\n", - "\u001b[37mDEBUG:lightwood-46866:Loss @ epoch 17: 0.6099573820829391\u001b[0m\n", - "\u001b[37mDEBUG:lightwood-46866:Loss @ epoch 18: 0.6046575754880905\u001b[0m\n", - "\u001b[37mDEBUG:lightwood-46866:Loss @ epoch 19: 0.5962131917476654\u001b[0m\n", - "\u001b[37mDEBUG:lightwood-46866:Loss @ epoch 20: 0.5909084677696228\u001b[0m\n", - "\u001b[37mDEBUG:lightwood-46866:Loss @ epoch 21: 0.5856661349534988\u001b[0m\n", - "\u001b[37mDEBUG:lightwood-46866:Loss @ epoch 22: 0.5805662572383881\u001b[0m\n", - "\u001b[37mDEBUG:lightwood-46866:Loss @ epoch 23: 0.575617328286171\u001b[0m\n", - "\u001b[37mDEBUG:lightwood-46866:Loss @ epoch 24: 0.5707968175411224\u001b[0m\n", - "\u001b[37mDEBUG:lightwood-46866:Loss @ epoch 25: 0.5632813721895218\u001b[0m\n", - "\u001b[37mDEBUG:lightwood-46866:Loss @ epoch 26: 0.5587586611509323\u001b[0m\n", - "\u001b[37mDEBUG:lightwood-46866:Loss @ epoch 27: 0.554344117641449\u001b[0m\n", - "\u001b[37mDEBUG:lightwood-46866:Loss @ epoch 28: 0.5499386340379715\u001b[0m\n", - "\u001b[37mDEBUG:lightwood-46866:Loss @ epoch 29: 0.5455891937017441\u001b[0m\n", - "\u001b[37mDEBUG:lightwood-46866:Loss @ epoch 30: 0.5413248538970947\u001b[0m\n", - "\u001b[37mDEBUG:lightwood-46866:Loss @ epoch 31: 0.5345934927463531\u001b[0m\n", - "\u001b[37mDEBUG:lightwood-46866:Loss @ epoch 32: 0.5304456949234009\u001b[0m\n", - 
"\u001b[37mDEBUG:lightwood-46866:Loss @ epoch 33: 0.526373103260994\u001b[0m\n", - "\u001b[37mDEBUG:lightwood-46866:Loss @ epoch 34: 0.5223924517631531\u001b[0m\n", - "\u001b[37mDEBUG:lightwood-46866:Loss @ epoch 35: 0.5184392035007477\u001b[0m\n", - "\u001b[37mDEBUG:lightwood-46866:Loss @ epoch 36: 0.5145991444587708\u001b[0m\n", - "\u001b[37mDEBUG:lightwood-46866:Loss @ epoch 37: 0.5086493343114853\u001b[0m\n", - "\u001b[37mDEBUG:lightwood-46866:Loss @ epoch 38: 0.5050476491451263\u001b[0m\n", - "\u001b[37mDEBUG:lightwood-46866:Loss @ epoch 39: 0.5015637576580048\u001b[0m\n", - "\u001b[37mDEBUG:lightwood-46866:Loss @ epoch 40: 0.49815742671489716\u001b[0m\n", - "\u001b[37mDEBUG:lightwood-46866:Loss @ epoch 41: 0.4948585033416748\u001b[0m\n", - "\u001b[37mDEBUG:lightwood-46866:Loss @ epoch 42: 0.49173182249069214\u001b[0m\n", - "\u001b[37mDEBUG:lightwood-46866:Loss @ epoch 43: 0.48690974712371826\u001b[0m\n", - "\u001b[37mDEBUG:lightwood-46866:Loss @ epoch 44: 0.4839773178100586\u001b[0m\n", - "\u001b[37mDEBUG:lightwood-46866:Loss @ epoch 45: 0.4811210632324219\u001b[0m\n", - "\u001b[37mDEBUG:lightwood-46866:Loss @ epoch 46: 0.4783552885055542\u001b[0m\n", - "\u001b[37mDEBUG:lightwood-46866:Loss @ epoch 47: 0.4757150560617447\u001b[0m\n", - "\u001b[37mDEBUG:lightwood-46866:Loss @ epoch 48: 0.47318898141384125\u001b[0m\n", - "\u001b[37mDEBUG:lightwood-46866:Loss @ epoch 49: 0.46942955255508423\u001b[0m\n", - "\u001b[37mDEBUG:lightwood-46866:Loss @ epoch 50: 0.4671967923641205\u001b[0m\n", - "\u001b[37mDEBUG:lightwood-46866:Loss @ epoch 51: 0.4650762975215912\u001b[0m\n", - "\u001b[37mDEBUG:lightwood-46866:Loss @ epoch 52: 0.4630257934331894\u001b[0m\n", - "\u001b[37mDEBUG:lightwood-46866:Loss @ epoch 53: 0.46110378205776215\u001b[0m\n", - "\u001b[37mDEBUG:lightwood-46866:Loss @ epoch 54: 0.45930930972099304\u001b[0m\n", - "\u001b[37mDEBUG:lightwood-46866:Loss @ epoch 55: 0.45666399598121643\u001b[0m\n", - "\u001b[37mDEBUG:lightwood-46866:Loss @ epoch 56: 0.4550795406103134\u001b[0m\n", - "\u001b[37mDEBUG:lightwood-46866:Loss @ epoch 57: 0.4535674601793289\u001b[0m\n", - "\u001b[37mDEBUG:lightwood-46866:Loss @ epoch 58: 0.45216208696365356\u001b[0m\n", - "\u001b[37mDEBUG:lightwood-46866:Loss @ epoch 59: 0.45088090002536774\u001b[0m\n", - "\u001b[37mDEBUG:lightwood-46866:Loss @ epoch 60: 0.4496418982744217\u001b[0m\n", - "\u001b[37mDEBUG:lightwood-46866:Loss @ epoch 61: 0.4477883279323578\u001b[0m\n", - "\u001b[37mDEBUG:lightwood-46866:Loss @ epoch 62: 0.4467353969812393\u001b[0m\n", - "\u001b[37mDEBUG:lightwood-46866:Loss @ epoch 63: 0.4457828402519226\u001b[0m\n", - "\u001b[37mDEBUG:lightwood-46866:Loss @ epoch 64: 0.4448719322681427\u001b[0m\n", - "\u001b[37mDEBUG:lightwood-46866:Loss @ epoch 65: 0.44403648376464844\u001b[0m\n", - "\u001b[37mDEBUG:lightwood-46866:Loss @ epoch 66: 0.44328153133392334\u001b[0m\n", - "\u001b[37mDEBUG:lightwood-46866:Loss @ epoch 67: 0.44207488000392914\u001b[0m\n", - "\u001b[37mDEBUG:lightwood-46866:Loss @ epoch 68: 0.4413738548755646\u001b[0m\n", - "\u001b[37mDEBUG:lightwood-46866:Loss @ epoch 69: 0.44084450602531433\u001b[0m\n", - "\u001b[37mDEBUG:lightwood-46866:Loss @ epoch 70: 0.4403578191995621\u001b[0m\n", - "\u001b[37mDEBUG:lightwood-46866:Loss @ epoch 71: 0.4398685395717621\u001b[0m\n", - "\u001b[37mDEBUG:lightwood-46866:Loss @ epoch 72: 0.43935835361480713\u001b[0m\n", - "\u001b[37mDEBUG:lightwood-46866:Loss @ epoch 73: 0.43840254843235016\u001b[0m\n", - "\u001b[37mDEBUG:lightwood-46866:Loss @ epoch 74: 0.4378361850976944\u001b[0m\n", - 
"\u001b[37mDEBUG:lightwood-46866:Loss @ epoch 75: 0.4375789165496826\u001b[0m\n", - "\u001b[37mDEBUG:lightwood-46866:Loss @ epoch 76: 0.43739429116249084\u001b[0m\n", - "\u001b[37mDEBUG:lightwood-46866:Loss @ epoch 77: 0.4372607320547104\u001b[0m\n", - "\u001b[37mDEBUG:lightwood-46866:Loss @ epoch 78: 0.43708017468452454\u001b[0m\n", - "\u001b[37mDEBUG:lightwood-46866:Loss @ epoch 79: 0.4364318400621414\u001b[0m\n", - "\u001b[37mDEBUG:lightwood-46866:Loss @ epoch 80: 0.43584632873535156\u001b[0m\n", - "\u001b[37mDEBUG:lightwood-46866:Loss @ epoch 81: 0.4356466382741928\u001b[0m\n", - "\u001b[37mDEBUG:lightwood-46866:Loss @ epoch 82: 0.4355204701423645\u001b[0m\n", - "\u001b[37mDEBUG:lightwood-46866:Loss @ epoch 83: 0.43557313084602356\u001b[0m\n", - "\u001b[37mDEBUG:lightwood-46866:Loss @ epoch 84: 0.43554021418094635\u001b[0m\n", - "\u001b[37mDEBUG:lightwood-46866:Loss @ epoch 85: 0.43514105677604675\u001b[0m\n", - "\u001b[37mDEBUG:lightwood-46866:Loss @ epoch 86: 0.43462760746479034\u001b[0m\n", - "\u001b[37mDEBUG:lightwood-46866:Loss @ epoch 87: 0.43442972004413605\u001b[0m\n", - "\u001b[37mDEBUG:lightwood-46866:Loss @ epoch 88: 0.43443459272384644\u001b[0m\n", - "\u001b[37mDEBUG:lightwood-46866:Loss @ epoch 89: 0.4344787895679474\u001b[0m\n", - "\u001b[37mDEBUG:lightwood-46866:Loss @ epoch 90: 0.4345344454050064\u001b[0m\n", - "\u001b[37mDEBUG:lightwood-46866:Loss @ epoch 1: 0.329136921600862\u001b[0m\n", - "\u001b[37mDEBUG:lightwood-46866:Loss @ epoch 2: 0.3284675722772425\u001b[0m\n", - "\u001b[37mDEBUG:lightwood-46866:Loss @ epoch 3: 0.33007449995387683\u001b[0m\n", - "\u001b[37mDEBUG:lightwood-46866:Loss @ epoch 4: 0.32765168764374475\u001b[0m\n", - "\u001b[37mDEBUG:lightwood-46866:Loss @ epoch 5: 0.3260806582190774\u001b[0m\n", - "\u001b[37mDEBUG:lightwood-46866:Loss @ epoch 6: 0.3272357068278573\u001b[0m\n", - "\u001b[37mDEBUG:lightwood-46866:Loss @ epoch 7: 0.3281749730760401\u001b[0m\n", - "\u001b[32mINFO:lightwood-46866:Started fitting LGBM models for array prediction\u001b[0m\n", - "\u001b[32mINFO:lightwood-46866:Started fitting LGBM model\u001b[0m\n", - "/Users/Pato/Work/MindsDB/env/lib/python3.8/site-packages/lightgbm/engine.py:151: UserWarning: Found `num_iterations` in params. Will use it instead of argument\n", - " warnings.warn(\"Found `{}` in params. Will use it instead of argument\".format(alias))\n", - "\u001b[32mINFO:lightwood-46866:A single GBM iteration takes 0.1 seconds\u001b[0m\n", - "\u001b[32mINFO:lightwood-46866:Training GBM () with 1325 iterations given 165.66666666666666 seconds constraint\u001b[0m\n", - "/Users/Pato/Work/MindsDB/env/lib/python3.8/site-packages/lightgbm/engine.py:156: UserWarning: Found `early_stopping_rounds` in params. Will use it instead of argument\n", - " warnings.warn(\"Found `{}` in params. 
Will use it instead of argument\".format(alias))\n", - "\u001b[32mINFO:lightwood-46866:Lightgbm model contains 1 weak estimators\u001b[0m\n", - "\u001b[32mINFO:lightwood-46866:Updating lightgbm model with 1 iterations\u001b[0m\n", - "\u001b[32mINFO:lightwood-46866:Model now has a total of 2 weak estimators\u001b[0m\n", - "\u001b[32mINFO:lightwood-46866:Started fitting LGBM model\u001b[0m\n", - "\u001b[32mINFO:lightwood-46866:A single GBM iteration takes 0.1 seconds\u001b[0m\n", - "\u001b[32mINFO:lightwood-46866:Training GBM () with 1325 iterations given 165.66666666666666 seconds constraint\u001b[0m\n", - "\u001b[32mINFO:lightwood-46866:Lightgbm model contains 1 weak estimators\u001b[0m\n", - "\u001b[32mINFO:lightwood-46866:Updating lightgbm model with 1 iterations\u001b[0m\n", - "\u001b[32mINFO:lightwood-46866:Model now has a total of 2 weak estimators\u001b[0m\n", - "\u001b[32mINFO:lightwood-46866:Started fitting LGBM model\u001b[0m\n", - "\u001b[32mINFO:lightwood-46866:A single GBM iteration takes 0.1 seconds\u001b[0m\n", - "\u001b[32mINFO:lightwood-46866:Training GBM () with 1325 iterations given 165.66666666666666 seconds constraint\u001b[0m\n", - "\u001b[32mINFO:lightwood-46866:Lightgbm model contains 1 weak estimators\u001b[0m\n", - "\u001b[32mINFO:lightwood-46866:Updating lightgbm model with 1 iterations\u001b[0m\n", - "\u001b[32mINFO:lightwood-46866:Model now has a total of 2 weak estimators\u001b[0m\n", - "\u001b[32mINFO:lightwood-46866:Started fitting LGBM model\u001b[0m\n", - "\u001b[32mINFO:lightwood-46866:A single GBM iteration takes 0.1 seconds\u001b[0m\n", - "\u001b[32mINFO:lightwood-46866:Training GBM () with 1325 iterations given 165.66666666666666 seconds constraint\u001b[0m\n", - "\u001b[32mINFO:lightwood-46866:Lightgbm model contains 1 weak estimators\u001b[0m\n" - ] - }, - { - "name": "stderr", - "output_type": "stream", - "text": [ - "\u001b[32mINFO:lightwood-46866:Updating lightgbm model with 1 iterations\u001b[0m\n", - "\u001b[32mINFO:lightwood-46866:Model now has a total of 2 weak estimators\u001b[0m\n", - "\u001b[32mINFO:lightwood-46866:Started fitting LGBM model\u001b[0m\n", - "\u001b[32mINFO:lightwood-46866:A single GBM iteration takes 0.1 seconds\u001b[0m\n", - "\u001b[32mINFO:lightwood-46866:Training GBM () with 1325 iterations given 165.66666666666666 seconds constraint\u001b[0m\n", - "\u001b[32mINFO:lightwood-46866:Lightgbm model contains 1 weak estimators\u001b[0m\n", - "\u001b[32mINFO:lightwood-46866:Updating lightgbm model with 1 iterations\u001b[0m\n", - "\u001b[32mINFO:lightwood-46866:Model now has a total of 2 weak estimators\u001b[0m\n", - "\u001b[32mINFO:lightwood-46866:Started fitting LGBM model\u001b[0m\n", - "\u001b[32mINFO:lightwood-46866:A single GBM iteration takes 0.1 seconds\u001b[0m\n", - "\u001b[32mINFO:lightwood-46866:Training GBM () with 1325 iterations given 165.66666666666666 seconds constraint\u001b[0m\n", - "\u001b[32mINFO:lightwood-46866:Lightgbm model contains 1 weak estimators\u001b[0m\n", - "\u001b[32mINFO:lightwood-46866:Updating lightgbm model with 1 iterations\u001b[0m\n", - "\u001b[32mINFO:lightwood-46866:Model now has a total of 2 weak estimators\u001b[0m\n", - "\u001b[32mINFO:lightwood-46866:Ensembling the mixer\u001b[0m\n", - "\u001b[32mINFO:lightwood-46866:Mixer: Neural got accuracy: 0.19612012470445245\u001b[0m\n", - "\u001b[32mINFO:lightwood-46866:Mixer: LightGBMArray got accuracy: 0.21013741093675975\u001b[0m\n", - "\u001b[32mINFO:lightwood-46866:Picked best mixer: LightGBMArray\u001b[0m\n", - 
"\u001b[32mINFO:lightwood-46866:Analyzing the ensemble of mixers\u001b[0m\n", - "\u001b[32mINFO:lightwood-46866:Adjustment on validation requested.\u001b[0m\n", - "\u001b[32mINFO:lightwood-46866:Updating the mixers\u001b[0m\n", - "/Users/Pato/Work/MindsDB/env/lib/python3.8/site-packages/torch/cuda/amp/grad_scaler.py:116: UserWarning: torch.cuda.amp.GradScaler is enabled, but CUDA is not available. Disabling.\n", - " warnings.warn(\"torch.cuda.amp.GradScaler is enabled, but CUDA is not available. Disabling.\")\n", - "\u001b[37mDEBUG:lightwood-46866:Loss @ epoch 1: 0.33339183280865353\u001b[0m\n", - "\u001b[37mDEBUG:lightwood-46866:Loss @ epoch 2: 0.3303144524494807\u001b[0m\n", - "\u001b[37mDEBUG:lightwood-46866:Loss @ epoch 3: 0.330986554423968\u001b[0m\n", - "\u001b[37mDEBUG:lightwood-46866:Loss @ epoch 4: 0.3315189927816391\u001b[0m\n", - "\u001b[37mDEBUG:lightwood-46866:Loss @ epoch 5: 0.33072087665398914\u001b[0m\n", - "\u001b[37mDEBUG:lightwood-46866:Loss @ epoch 6: 0.33309372514486313\u001b[0m\n", - "\u001b[32mINFO:lightwood-46866:Updating array of LGBM models...\u001b[0m\n", - "\u001b[32mINFO:lightwood-46866:Updating lightgbm model with 1 iterations\u001b[0m\n", - "/Users/Pato/Work/MindsDB/env/lib/python3.8/site-packages/lightgbm/engine.py:151: UserWarning: Found `num_iterations` in params. Will use it instead of argument\n", - " warnings.warn(\"Found `{}` in params. Will use it instead of argument\".format(alias))\n", - "/Users/Pato/Work/MindsDB/env/lib/python3.8/site-packages/lightgbm/engine.py:156: UserWarning: Found `early_stopping_rounds` in params. Will use it instead of argument\n", - " warnings.warn(\"Found `{}` in params. Will use it instead of argument\".format(alias))\n", - "\u001b[32mINFO:lightwood-46866:Model now has a total of 3 weak estimators\u001b[0m\n", - "\u001b[32mINFO:lightwood-46866:Updating lightgbm model with 1 iterations\u001b[0m\n", - "\u001b[32mINFO:lightwood-46866:Model now has a total of 3 weak estimators\u001b[0m\n", - "\u001b[32mINFO:lightwood-46866:Updating lightgbm model with 1 iterations\u001b[0m\n", - "\u001b[32mINFO:lightwood-46866:Model now has a total of 3 weak estimators\u001b[0m\n", - "\u001b[32mINFO:lightwood-46866:Updating lightgbm model with 1 iterations\u001b[0m\n", - "\u001b[32mINFO:lightwood-46866:Model now has a total of 3 weak estimators\u001b[0m\n", - "\u001b[32mINFO:lightwood-46866:Updating lightgbm model with 1 iterations\u001b[0m\n", - "\u001b[32mINFO:lightwood-46866:Model now has a total of 3 weak estimators\u001b[0m\n", - "\u001b[32mINFO:lightwood-46866:Updating lightgbm model with 1 iterations\u001b[0m\n", - "\u001b[32mINFO:lightwood-46866:Model now has a total of 3 weak estimators\u001b[0m\n" - ] - } - ], - "source": [ - "predictor.learn(train)" - ] - }, - { - "cell_type": "markdown", - "metadata": {}, - "source": [ - "## Predict\n", - "\n", - "Once the predictor has trained, we can use it to generate 6-month forecasts for each of the test set data points:" - ] - }, - { - "cell_type": "code", - "execution_count": 28, - "metadata": {}, - "outputs": [ - { - "name": "stderr", - "output_type": "stream", - "text": [ - "\u001b[32mINFO:lightwood-46866:Dropping features: []\u001b[0m\n", - "\u001b[32mINFO:lightwood-46866:Cleaning the data\u001b[0m\n", - "\u001b[32mINFO:lightwood-46866:Transforming timeseries data\u001b[0m\n", - "\u001b[32mINFO:lightwood-46866:Featurizing the data\u001b[0m\n", - "/Users/Pato/Work/MindsDB/env/lib/python3.8/site-packages/pandas/core/indexing.py:1637: SettingWithCopyWarning: \n", - "A value is trying to be 
set on a copy of a slice from a DataFrame\n", - "\n", - "See the caveats in the documentation: https://pandas.pydata.org/pandas-docs/stable/user_guide/indexing.html#returning-a-view-versus-a-copy\n", - " self._setitem_single_block(indexer, value, name)\n", - "\u001b[32mINFO:lightwood-46866:AccStats.explain() has not been implemented, no modifications will be done to the data insights.\u001b[0m\n", - "\u001b[32mINFO:lightwood-46866:GlobalFeatureImportance.explain() has not been implemented, no modifications will be done to the data insights.\u001b[0m\n" - ] - } - ], - "source": [ - "forecasts = predictor.predict(test)" - ] - }, - { - "cell_type": "markdown", - "metadata": {}, - "source": [ - "Let's check how a single row might look:" - ] - }, - { - "cell_type": "code", - "execution_count": 37, - "metadata": {}, - "outputs": [ - { - "data": { - "text/html": [ - "
\n", - "\n", - "\n", - " \n", - " \n", - " \n", - " \n", - " \n", - " \n", - " \n", - " \n", - " \n", - " \n", - " \n", - " \n", - " \n", - " \n", - " \n", - " \n", - " \n", - " \n", - " \n", - " \n", - " \n", - " \n", - " \n", - " \n", - "
predictiontruthorder_Monthconfidencelowerupperanomaly
10[51.28799878891615, 46.76867159945164, 52.0899...51.0[-272577600.0, -269899200.0, -267220800.0, -26...[0.24, 0.24, 0.24, 0.24, 0.24, 0.24][30.80746268275371, 26.288135493289204, 31.609...[71.76853489507859, 67.24920770561408, 72.5704...False
\n", - "
" - ], - "text/plain": [ - " prediction truth \\\n", - "10 [51.28799878891615, 46.76867159945164, 52.0899... 51.0 \n", - "\n", - " order_Month \\\n", - "10 [-272577600.0, -269899200.0, -267220800.0, -26... \n", - "\n", - " confidence \\\n", - "10 [0.24, 0.24, 0.24, 0.24, 0.24, 0.24] \n", - "\n", - " lower \\\n", - "10 [30.80746268275371, 26.288135493289204, 31.609... \n", - "\n", - " upper anomaly \n", - "10 [71.76853489507859, 67.24920770561408, 72.5704... False " - ] - }, - "execution_count": 37, - "metadata": {}, - "output_type": "execute_result" - } - ], - "source": [ - "forecasts.iloc[[10]]" - ] - }, - { - "cell_type": "markdown", - "metadata": {}, - "source": [ - "You'll note that the point `prediction` has associated `lower` and `upper` bounds that are a function of the estimated `confidence` the model has on its own output. Apart from this, `order_Month` yields the timestamps of each prediction, `truth` lets us know what is the one-step-ahead observed value (if it exists at all). Finally, the `anomaly` tag will let you know if the observed value falls outside of the predicted region. \n", - "\n", - "\n", - "## Visualizing a forecast\n", - "\n", - "Okay, time series are much easier to appreciate through plots. Let's make one:\n", - "\n", - "NOTE: We will use `matplotlib` to generate a simple plot of these forecasts. If you want to run this notebook locally, you will need to `pip install matplotlib` for the following code to work." - ] - }, - { - "cell_type": "code", - "execution_count": 38, - "metadata": {}, - "outputs": [], - "source": [ - "import matplotlib.pyplot as plt" - ] - }, - { - "cell_type": "code", - "execution_count": 69, - "metadata": {}, - "outputs": [ - { - "data": { - "image/png": "iVBORw0KGgoAAAANSUhEUgAAAtQAAAHwCAYAAACG+PhNAAAAOXRFWHRTb2Z0d2FyZQBNYXRwbG90bGliIHZlcnNpb24zLjMuMywgaHR0cHM6Ly9tYXRwbG90bGliLm9yZy/Il7ecAAAACXBIWXMAAAsTAAALEwEAmpwYAACIoklEQVR4nOzdd3xV9f3H8dc3i7BnSIAEwp6BAGEHmQICClWh4BZFQX9VW6tttdraWtvaVltrFUQRR0HBBcoQAigbZO8RCCPMkLAJZH1/f+SGBkwg4957bpL38/G4D5Jzzznf972HJJ+cfIex1iIiIiIiIkXj53QAEREREZGSTAW1iIiIiEgxqKAWERERESkGFdQiIiIiIsWgglpEREREpBhUUIuIiIiIFIMKahHxCmPMA8aYZU7ncJLJ9r4x5pQxZo3TeZzijfdB/99ExJtUUIt4mTFmvzEm1RhzPtejrtO5bsQY850x5mGnczjJDUVaLHAzEG6t7eymWI4wxvzeGPNxEQ936/tgjIk0xlhjTEBxz+VNxpgpxpiXnc5RVPqeIPI/KqhFnHGrtbZSrseRwhxc0goHuaIBsN9ae8HpIA4r8vug//ulhzHG3+kMIu6iglrERxhjyhlj/mmMOeJ6/NMYU871XG9jTKIx5lfGmGPA+8YYP2PMr40xe40xycaY6caYGrnOF2uMWWGMOW2MOWSMecC1fYgxZoMx5qxr++9zHRNsjPnYdb7TxpgfjDGhxpg/AT2BN1131N907d/CGLPAGJNijNlljBmZ61w1jTGzXO2sARrf4PXPMMYcM8acMcYsMca0zvXcFGPMW8aYua72lxtjwlzv0SljzE5jTPtc+7d03T07bYzZZoy5LddzV91Vu/aus+tO5zhjzB7X8f9xdVFoCUwAurkynM7nddR1ve4UY0y8MWasa/tDwLu5jn8pj2ObGGO+d70HJ40xn7q2/+gObO7XkfMajDF/d70fCcaYW655jfuMMedcz92da/tyY8ybrjZ3GmP6FeC1DAKeA37qei2brtfONa8xz/fBGDPW1UaKq826uY6xxpjHjTF7gD15vO1LXP+edp2zW65j83tPqhpj3jPGHDXGHDbGvGzyKfBM9t346caYD12vbZsxJuaa9+lzY0ySq50nXNtrmOyv21tdn1dyvcb7jDGPAHcDz7oyf51Hu8YY87ox5oTJ/jraYoxp43qunOu1HTTGHDfGTDDGlHc9l/P94lnXsUeNMcONMYONMbtd7/FzudrJ93uJce/3hCnGmLeNMXOMMReAPnm93yIlkrVWDz308OID2A/0z2P7H4BVQG0gBFgB/NH1XG8gA/grUA4oDzzp2j/ctW0iMM21fwPgHDAaCARqAtG5zhVF9i/UbYHjwHDXc48CXwMVAH+gI1DF9dx3wMO58lYEDgEPAgFAe+Ak0Mr1/CfAdNd+bYDDwLLrvC9jgMqu1/JPYGOu56a4zt0RCAYWAQnAfa6cLwOLXfsGAvFkF3xBQF/Xe9E8n9fxQO5cgAW+AaoB9YEkYFBe++bzOpYAb7lyRruO71uQ44FpwPOuaxMMxLq2R7pyBeTa98rrcJ03HRjrej/GA0cA43r/z+Z6/XWA1rmOywB+7nrffgqcAWoU4LX8Hvj4mv8PebaTx+u89j3v67q+HVzX/9/AkmuuyQKgBlA+j/Pl9f7k+564nv+S7K+ZimR/za0BHs0n7++BS8Bg17n+DKxyPecHrANeJPv/WyNgHzDQ9fwA4JirjUnAZ9f8v375Ov8fBrrOXc11LVsCdVzPvQ7Mcr0nlcn+uv3zNd8vXnRd17GuazfVtW9rI
BVo6Nr/et9L3Pk9YQrZ/796uN63YKe/H+uhh7sejgfQQ4+y9iC7oD4PnHY9vnJt3wsMzrXfQLL/LJ7zAzIt9w8gYAfQL9fndVwFRADwG+DLAub5J/C66+MxZBfybfPY79ofnj8Fll6zz0Tgd64fvOlAi1zPvcINitFc+1Yju0Cq6vp8CjAp1/M/A3bk+jwKOO36uCfZBYxfruenAb/P53U8wI8L6thcn08Hfp3XvnnkjgAygcq5tv0ZmFLA4z8E3iG7b3Hu7ZHcuKCOz/VcBdf+YWQXOaeBO7imGHUdd6XIdG1bA9xbgNfye35cUOfZTh6v89r3/D3g1VyfV3L9/4nMdU36Xud8eb0/13tPQoHLuXOS/cvn4nzO/3sgLtfnrYBU18ddgIPX7P8b4P1cn/8b2EL2L5U1c22fwvUL6r7AbqArV/9/NsAFoHGubd2ABNfHvckumP1dn1d2vfYuufZfx/9+kb7e9xK3fE/I9Xo/vNHXvx56lMSHunyIOGO4tbaa6zHcta0ucCDXPgdc23IkWWsv5fq8AfCl68+wp8n+oZhJdrEQQXaB/iPGmC7GmMWuP0+fAcYBtVxPfwR8C3xisrudvGqMCcznNTQAuuS078pwN9kFSwjZP4wPXfN68mSM8TfG/MX1J+ezZP/SQa5ckH0nPUdqHp9Xcn1cFzhkrc26pu16+bWfh2O5Pr6Y69w3UhdIsdaeK2Lbz5JdLK1xdSsYU8DjIFdma+1F14eVbHY/5Z+SfZ2PGmNmG2Na5DrusLXWXpO3bmFfSwHauZ6r/u9ba88Dyde0dejagwogz/eE7P+7ga6cOf93J5J9F/mG5yL7/0Swye6C0wCoe83XwXNkfx3meIfsv9JMsdYmFzS8tXYR8CbwH+CEMeYdY0wVsr++KgDrcrU5z7U9R7K1NtP1carr3/y+Zq73vcRd3xNyFOU6ivg8FdQivuMI2T+QctR3bcthr96dQ8AtuQrzatbaYGvtYddz+fVZnkr2n4ojrLVVye4XbACstenW2pesta2A7sBQsrtV5Nf+99e0X8laO57sPy9nkF3Y5349+bkLGAb0B6qSfceRnFyFdASIMMbk/v5Wn+y7g5B9Z69Crudy/7C/kWvfg7zarmGMqZxP29c/ubXHrLVjrbV1yf5T+1vGmCauzFDE3Nbab621N5N953En2V0PctQzxuR+n3P+393otfzovbhBO9dz1f99Y0xFsrsp5X7frvfe3+i6XOsQ2Xeoa+X6v1vFWtv6Rgfmc66Ea74OKltrB8OVgXfvkP3Xh8dc17PAua21b1hrO5J9V7wZ8AzZ3ShSye5Sk9NmVWttQX/xy+s15Pm9xI3fEwr8mkVKIhXUIr5jGvBbY0yIMaYW2f0frzct2QTgT8aYBgCu44a5nvsv0N8YM9IYE2CyBwhGu56rTPadx0vGmM5kF7O4ztHHGBPlKgLOkv1n35w7vcfJ7h+a4xugmTHmXmNMoOvRyRjT0nVn7Avg98aYCsaYVsD913ktlckucJLJLhpfud4bdQOryb6D+KwrU2/gVrL7dANsBG535WoCPFSIcx8Hwo0xQXk9aa09RPafx//sGszV1nX+Ak0vZ4wZYYwJd316iuziI8tam0R2cXmP627+GG4wyDPXOUONMcNcReplsrsb5b57Xxt4wvVejSC7n+6cAryW40Bkzi8uBWjneqYBDxpjok32QNxXgNXW2v0FPD7J1VajG+0IYK09CswH/mGMqeIalNfYGNOrgO3ltgY4Z7IHDJd3XZ82xphOruefI/s6jgH+Bnxo/jf48dqvqau4vp66uO4IXyC7H3eW668vk4DXjTG1XfvWM8YMLEJ+uM73End9TyhiLpESQwW1iO94GVgLbCa7v+V617b8/IvsO83zjTHnyB5U1AXAWnuQ7AFUTwMpZBeR7VzHPQb8wXXMi2T3Ec4RBnxG9g/OHcD3ZP/JN6e9O032jAlvuLoCDABGkX2H8Rj/GzQJ8H9k/0n5GNl9J9+/zmv5kOw/+R8GtrteS5FYa9PILqBvIftO3lvAfdbana5dXie7P/px4AOyf/koqEXANuCYMeZkPvuMJvsO+xGyB779zlobV8DzdwJWG2POk31tn7TW7nM9N5bsu5PJZA8qW1HAc/oBv3DlSQF6kT1AL8dqoCnZ79WfgDtzdUu43muZ4fo32RizvgDt5Mt1zheAz4GjZP+yMKqAry+nO8efgOWurgZdC3DYfWQPItxO9i8vn5F9Z71QXL88DiV70GYC2e/ju0BVY0xHst+T+1z7/ZXs4vrXrsPfA1q5Mn+Vx+mrkF04nyL76yOZ7KIc4FdkD75d5eomFQc0L2x+l3y/l+De7wkipVbOaGcRESljTPZUig9ba2OdziIiUpLpDrWIiIiISDGooBYRERERKQZ1+RARERERKQbdoRYRERERKQYV1CIiIiIixRDgdIDiqFWrlo2MjHQ6hoiIiIiUcuvWrTtprQ3J67kSXVBHRkaydu1ap2OIiIiISClnjDmQ33Pq8iEiIiIiUgwqqEVEREREikEFtYiIiIhIMZToPtQiIiIiviI9PZ3ExEQuXbrkdBQphuDgYMLDwwkMDCzwMSqoRURERNwgMTGRypUrExkZiTHG6ThSBNZakpOTSUxMpGHDhgU+Tl0+RERERNzg0qVL1KxZU8V0CWaMoWbNmoX+K4MKahERERE3UTFd8hXlGqqgFhERESnF9u/fT5s2bZyO8SO9e/d2+3oia9eu5YknnnDrOQtCfahFREREpFAyMjIICPCtMjIjI4OYmBhiYmK83rbuUIuIiIiUEq+99hpt2rShTZs2/POf/7yyPSMjg7vvvpuWLVty5513cvHiRQB+/etf06pVK9q2bcsvf/lLAJKSkrjjjjvo1KkTnTp1Yvny5QD8/ve/595776VHjx7ce++9dO3alW3btl1pI+eO84ULFxgzZgydO3emffv2zJw5E4DU1FRGjRpFy5Yt+clPfkJqamqer6E4mb777juGDh0KkG+Obdu20blzZ6Kjo2nbti179uwp9vvuW79aiIiIiJQCT817io3HNrr1nNFh0fxz0D/zfX7dunW8//77rF69GmstXbp0oVevXlSvXp1du3bx3nvv0aNHD8aMGcNbb73Fgw8+yJdffsnOnTsxxnD69GkAnnzySX7+858TGxvLwYMHGThwIDt27ABg+/btLFu2jPLly/P6668zffp0XnrpJY4ePcrRo0eJiYnhueeeo2/fvkyePJnTp0/TuXNn+vfvz8SJE6lQoQI7duxg8+bNdOjQ4UevITk5uViZvvvuuyvn+tOf/pRnjgkTJvDkk09y9913k5aWRmZmZrGvjQpqERERkVJg2bJl/OQnP6FixYoA3H777SxdupTbbruNiIgIevToAcA999zDG2+8wVNPPUVwcDAPPfQQQ4cOvXJnNy4uju3bt18579mzZzl//jwAt912G+XLlwdg5MiRDBgwgJdeeonp06dz5513AjB//nxmzZrF3//+dyB79pODBw+yZMmSK/2b27ZtS9u2bX/0GqpWrVqsTLnll6Nb
t2786U9/IjExkdtvv52mTZsW6f3OTQW1iIiIiJtd706yE66ducIYQ0BAAGvWrGHhwoV89tlnvPnmmyxatIisrCxWrVpFcHDwj86TU6wD1KtXj5o1a7J582Y+/fRTJkyYAGTP5fz555/TvHnzQucsbqbc8svRsmVLunTpwuzZsxk8eDATJ06kb9++hc6am/pQi4iIiJQCPXv25KuvvuLixYtcuHCBL7/8kp49ewJw8OBBVq5cCcDUqVOJjY3l/PnznDlzhsGDB/P666+zadMmAAYMGMC///3vK+fduHFjvm3+9Kc/5dVXX+XMmTNX7jgPHDiQf//731hrAdiwYQMAN910E1OnTgVg69atbN68+Ufnc0emHPnl2LdvH40aNeKJJ55g2LBheeYoLBXUIiIiIqVAhw4deOCBB+jcuTNdunTh4Ycfpn379gA0b96c//znP7Rs2ZJTp04xfvx4zp07x9ChQ2nbti2xsbG89tprALzxxhusXbuWtm3b0qpVqyt3nvNy55138sknnzBy5Mgr21544QXS09Np27YtrVu35oUXXgBg/PjxnD9/npYtW/Liiy/SsWPHH53PHZlulGP69Om0adOG6Ohotm7dyn333VfAdzh/JqdqL4liYmKsu+cvFBERESmKHTt20LJlS6djiBvkdS2NMeustXnOyac71D4iM6v4I0xFRERExPtUUPuA5IvJ1Hy1Jm/98JbTUURERESkkFRQ+4AtJ7Zw5vIZnpr3FKsSVzkdR0REREQKQQW1D9iTnL1CT7XgaoycMZKTF086nEhERERECkoFtQ/Yk7KHIP8gvrnrG45fOM49X9xDls1yOpaIiIiIFIAKah8QnxJPo+qN6FyvM28MeoNv937LK0tfcTqWiIiIiBSACmofsCdlD01rZC97+UjHR7g76m5eXPwicfviHE4mIiIipdnDDz981ZLeefnqq69uuI+79O7dm5wpkQcPHszp06cLnOvFF18kLs6Z2kkFtcOybBZ7U/bSpEYTIHsp0AlDJ9AypCV3fX4Xh88edjihiIiIlFbvvvsurVq1uu4+xS2oMzIyinTcnDlzqFatWr7PX5vrD3/4A/379y9SW8XlsYLaGDPZGHPCGLP1mu0/M8bsNMZsM8a8mmv7b4wx8caYXcaYgZ7K5WuOnDtCakbqlTvUAJWCKvHZiM+4mH6RUZ+PIj0z3cGEIiIiUhLs37+fFi1acPfdd9OyZUvuvPNOLl68CMDChQtp3749UVFRjBkzhsuXLwNX3xGuVKkSzz//PO3ataNr164cP36cFStWMGvWLJ555hmio6PZu3fvVW0+8MADjBs3jpiYGJo1a8Y333wDwJQpU7jtttvo27cv/fr148KFC4wZM4bOnTvTvn17Zs6cCUBqaiqjRo2iZcuW/OQnPyE1NfXKuSMjIzl5Mnuihg8//JC2bdvSrl077r333jxzPfDAA3z22WfXfb2RkZH87ne/o0OHDkRFRbFz5063vPcBbjlL3qYAbwIf5mwwxvQBhgHtrLWXjTG1XdtbAaOA1kBdIM4Y08xaW+pXO8mZ4aNpzaZXbW8Z0pJJt07iri/u4rmFz/G3AX9zIp6IiIgUwbyn5nFs4zG3njMsOoxB/xx03X127drFe++9R48ePRgzZgxvvfUW//d//8cDDzzAwoULadasGffddx9vv/02Tz311FXHXrhwga5du/KnP/2JZ599lkmTJvHb3/6W2267jaFDh3LnnXfm2eb+/ftZs2YNe/fupU+fPsTHxwOwfv16Nm/eTI0aNXjuuefo27cvkydP5vTp03Tu3Jn+/fszceJEKlSowI4dO9i8eTMdOnT40fm3bdvGyy+/zIoVK6hVqxYpKSnUqFEj31yXLl267uutVasW69ev56233uLvf/877777bgGvQP48dofaWrsESLlm83jgL9bay659Tri2DwM+sdZettYmAPFAZ09l8yXxKdn/6XK6fOQ2Omo0j8U8xt9X/p0vd3zp7WgiIiJSwkRERNCjRw8A7rnnHpYtW8auXbto2LAhzZo1A+D+++9nyZIlPzo2KCiIoUOHAtCxY0f2799foDZHjhyJn58fTZs2pVGjRlfu+t58883UqFEDgPnz5/OXv/yF6OhoevfuzaVLlzh48CBLlizhnnvuAaBt27a0bdv2R+dftGgRI0aMoFatWgBXzpmfG73e22+/vdCv8UY8eYc6L82AnsaYPwGXgF9aa38A6gG5VzRJdG0r9XKmzIuoEpHn868NfI01R9bwwMwHaBvalsY1Gns5oYiIiBTWje4ke4ox5rqfX09gYOCV/f39/Qvc9zm/NitWrHhlm7WWzz//nObNmxc4j6eUK1cOKNxrvBFvD0oMAGoAXYFngOmmMFcaMMY8YoxZa4xZm5SU5ImMXpUzZZ6/n3+ez5cLKMeMETPwN/7cOeNOUtNT89xPRERE5ODBg6xcuRKAqVOnEhsbS/Pmzdm/f/+VrhgfffQRvXr1KvA5K1euzLlz5/J9fsaMGWRlZbF371727duXZ9E8cOBA/v3vf2OtBWDDhg0A3HTTTUydOhWArVu3snnz5h8d27dvX2bMmEFycjIAKSkp181V3NdbFN4uqBOBL2y2NUAWUAs4DOS+RRvu2vYj1tp3rLUx1tqYkJAQjwf2tNxT5uUnslokH/3kIzYe28iT8570UjIREREpaZo3b85//vMfWrZsyalTpxg/fjzBwcG8//77jBgxgqioKPz8/Bg3blyBzzlq1Cj+9re/0b59+x8NSgSoX78+nTt35pZbbmHChAkEBwf/aJ8XXniB9PR02rZtS+vWrXnhhRcAGD9+POfPn6dly5a8+OKLdOzY8UfHtm7dmueff55evXrRrl07fvGLX1w3V3Ffb1GYnN8UPHJyYyKBb6y1bVyfjwPqWmtfNMY0AxYC9YFWwFSy+03XdW1veqNBiTExMTZnZGpJlGWzqPhKRR6LeYx/DPzHDfd/buFz/HnZn5kybAr3R9/vhYQiIiJSUDt27KBly5aOtb9//36GDh3K1q1bb7yzmzzwwAPXHbBYUuV1LY0x66y1MXnt78lp86YBK4HmxphEY8xDwGSgkWsqvU+A+113q7cB04HtwDzg8bIww8eRc0e4lHEpzwGJeflDnz/QO7I342ePZ8vxLR5OJyIiIiIF4bFBidba0fk8dU8++/8J+JOn8vii/KbMy0+AXwDT7phG+4ntuXPGnawdu5bK5Sp7MqKIiIiUEJGRkV69Ow3Z802LVkp01PWmzMtPWKUwPrnjE+JT4nn464fxZJcdEREREbkxFdQOutGUefnpFdmLV/q+wvRt03lzzZseSiciIiKFpRtdJV9RrqEKagftSdlD4+qN850y73qe6fEMtza7lafnP83qxNUeSCciIiKFERwcTHJysorqEsxaS3Jycp4zlVyPtxd2kVziU+IL1d0jNz/jxwfDP6DDOx0YMWMEGx7dQM0KNd2cUERERAoqPDycxMRESsM6GWVZcHAw4eHhhTpGBbVDsmwW8SnxDGg0oMjnqF6+OjNGzKDH5B7c8+U9zL5rNn5
Gf3QQERFxQmBgIA0bNnQ6hjhA1ZdDCjtlXn5i6sbwr0H/Yl78PF5Z+oqb0omIiIhIQamgdkhhp8y7nkc7PspdUXfxu+9+x8J9C4t9PhEREREpOBXUDtmT4iqob7DseEEYY5g4dCLNazbnri/u4vDZPFdtFxEREREPUEHtkPiUeIL8gwivUrhO7/mpFFSJz0d+zoW0C4z6fBTpmeluOa+IiIiIXJ8KaocUZ8q8/LQMack7t77DsoPLeH7R8247r4iIiIjkTwW1Q4ozZd713BV1F+NjxvO3FX/jq51fuf38IiIiInI1FdQOyJkyzx39p/Py+sDXiakbwwNfPcCB0wc80oaIiIiIZFNB7YDDZw9zKeOSW2b4yEu5gHJ8cscnnLl8hmlbp3mkDRERERHJpoLaAfEp8QAe6fKRo3GNxrSs1ZKlB5d6rA0RERERUUHtCHdOmXc9Pev3ZPnB5WTZLI+2IyIiIlKWqaB2gLunzMtPbP1Yzlw+w7YT2zzajoiIiEhZpoLaAZ6YMi8vsfVjAdTtQ0RERMSDVFA7YE/yHo8NSMwtslok9SrXY9nBZR5vS0RERKSsUkHtZVk2i72n9tKkuucGJOYwxhBbP1YFtYiIiIgHqaD2Mk9PmXet2PqxHDp7SPNRi4iIiHiICmov88aUebnl9KPWXWoRERERz1BB7WXemjIvR1TtKKqUq6KCWkRERMRDVFB72Z7kPZTzL0dE1QivtOfv50/3iO6a6UNERETEQ1RQe1n8qXgaVW+En/HeWx8bEcu2pG2kpKZ4rU0RERGRskIFtZd5a8q83Ho26AnAikMrvNquiIiISFmggtqLvDllXm6d6nYi0C+QpQfU7UNERETE3VRQe5G3p8zLUT6wPDF1Y1h2SAMTRURERNxNBbUX5UyZ560ZPnLrWb8nPxz+gdT0VK+3LSIiIlKaqaD2opwp87w1B3VusfVjSc9K54cjP3i9bREREZHSTAW1F3l7yrzcukd0B7TAi4iIiIi7qaD2IiemzMtRs0JNWoe0VkEtIiIi4mYqqL3IiSnzcoutH8uKQyvIzMp0LIOIiIhIaaOC2ktypsxzYkBijtj6sZy5fIatJ7Y6lkFERESktFFB7SU5U+Y5MSAxR8/62Qu8qNuHiIiIiPuooPaSnBk+nLxDXb9qfcKrhGs+ahERERE3UkHtJTlzUDt5h9oYQ2z9WJYeWIq11rEcIiIiIqWJCmovcXLKvNxiI2I5fO4wB84ccDSHiIiISGmhgtpL4k/F07hGY0emzMutZwP1oxYRERFxJxXUXrIneY+j3T1ytA5pTdVyVVl6YKnTUURERERKBRXUXuALU+bl8Pfzp3tEdw1MFBEREXETFdRe4AtT5uXWs35PtidtJ/listNRREREREo8FdRe4AtT5uUWWz8WgOWHljucRERERKTkU0HtBTlT5jm57Hhunep1Isg/SAMTRURERNxABbUX5EyZF14l3OkoAAQHBNOpbicV1CIiIiJuoILaC/ak7PGJKfNyi60fy9oja0lNT3U6ioiIiEiJ5jsVXikWnxLvMwMSc8TWjyU9K501h9c4HUVERESkRFNB7WG+NGVebj0iegBa4EVERESkuDxWUBtjJhtjThhjtubx3NPGGGuMqeX63Bhj3jDGxBtjNhtjOngql7flTJnnawV19fLVaVO7DUsPaoEXERERkeLw5B3qKcCgazcaYyKAAcDBXJtvAZq6Ho8Ab3swl1flTJnna10+AGIjYllxaAWZWZlORxEREREpsTxWUFtrlwApeTz1OvAsYHNtGwZ8aLOtAqoZY+p4Kps37Ul2zUHtI1Pm5dazQU/OpZ1jy4ktTkcRERERKbG82ofaGDMMOGyt3XTNU/WAQ7k+T3RtK/HiU+J9asq83HIWeFE/ahEREZGi81pBbYypADwHvFjM8zxijFlrjFmblJTknnAe5ItT5uWoX7U+EVUi1I9aREREpBi8WeU1BhoCm4wx+4FwYL0xJgw4DETk2jfcte1HrLXvWGtjrLUxISEhHo5cfPEp8T43IDG32PqxLDu4DGvtjXcWERERkR/xWkFtrd1ira1trY201kaS3a2jg7X2GDALuM8120dX4Iy19qi3snlKzpR5vjggMUfP+j05cu4I+0/vdzqKiIiISInkyWnzpgErgebGmERjzEPX2X0OsA+IByYBj3kqlzclnk30ySnzcsvpR61uHyIiIiJFE+CpE1trR9/g+chcH1vgcU9lcUp8Sjzgm1Pm5WhduzXVgqux7OAy7mt3n9NxREREREoc3xspV4r48pR5OfyMHz0iemimDxEREZEiUkHtQb48ZV5usfVj2XFyBycvnnQ6ioiIiEiJo4Lag3x5yrzccvpRLz+43OEkIiIiIiWPb1d6JdyelD0+PSAxR6e6nSjnX07dPkRERESKQAW1h2TZLPam+PaUeTnKBZSjU71OmulDREREpAhUUHtI4tlELmdeLhF3qAFiI2JZd3QdF9MvOh1FREREpERRQe0hOVPm+fIMH7n1bNCTjKwM1hxe43QUERERkRJFBbWH5EyZVxK6fAB0C++GwbD0gLp9iIiIiBSGCmoP2ZOyp0RMmZejevnqtKndhmWHNDBRREREpDBUUHtIfEp8iZgyL7fY+rGsOLSCjKwMp6OIiIiIlBglp9orYUrKlHm59azfk/Np59l8fLPTUURERERKDBXUHpAzZV5JK6hzFnjRfNQiIiIiBaeC2gNypswrKQMSc0RUjaB+1foqqEVEREQKQQW1B+TM8FFSpszLrWf9niw9uBRrrdNRREREREoEFdQekDMHdUm7Qw3Z3T6OnT/GvlP7nI4iIiIiUiKooPaAPSl7CA4ILjFT5uWmftQiIiIihaOC2gPiU+JpXL1kTZmXo1VIK6oHV1dBLSIiIlJAJa/iKwH2pOwpkd09APyMHz3q92DpQa2YKCIiIlIQKqjdrKROmZdbbEQsu5J3kXQhyekoIiIiIj5PBbWbldQp83Lr2aAnAMsPLXc4iYiIiIjvU0HtZiV5yrwcHet0pJx/OZYeULcPERERkRtRQe1mOVPmleQuH+UCytG5XmeWHdLARBEREZEbUUHtZjlT5tWrUs/pKMXSs35P1h9dz4W0C05HEREREfFpKqjdbE/KnhI7ZV5usfVjycjKYPXh1U5HEREREfFpJbvq80HxKfElekBijm4R3TAYzUctIiIicgMqqN2oNEyZl6NacDWiQqNUUIuIiIjcgApqN8qZMq8kz/CRW8/6PVmZuJKMrAyno4iIiIj4LBXUbpQzZV5p6PIB2f2oz6edZ9OxTU5HEREREfFZKqjdaE+Kaw7qUtDlA7ILakDdPkRERESuQwW1G8WnxJeKKfNyhFcJJ7JaJEsPaoEXERERkfyooHaj0jJlXm6x9WNZdnAZ1lqno4hcV0ZWBhPWTuBSxiWno4iISBlTeio/HxCfEl9qBiTmiI2I5fiF4+w9tdfpKCLX9c3ubxg/ezxf7fzK6SgiIlLGqKB2k5wp85pULx0DEnP0bNATgKUH1O1DfNuihEUAbDm+xeEkIiJS1qigdp
NDZw6VqinzcrSo1YIa5WtoYKL4vIUJCwHYckIFtYiIeJcKajeJT4kHSs+UeTn8jB89Inqw7JAKavFdx84fY3vSdvyMnwpqERHxOhXUblLapszLrWf9nuxO3s2JCyecjiKSp8UJiwG4rflt7D+9n3OXzzmcSEREyhIV1G5S2qbMy03zUYuvW5iwkGrB1biv7X0AbEva5nAiEREpS1RQu0lpnDIvR8e6HQkOCFZBLT5rUcIi+kT2oV1YO0ADE0VExLtKX/XnkD3Je0rdgMQcQf5BdKnXRQW1+KSEUwkknE6gb8O+RFaLpGJgRfWjFhERr1JB7QaZWZnsPbW3VPafzhFbP5b1R9dzPu2801FErpIzXV7fhn3xM360qd1GBbWIiHiVCmo3SDybSFpmWqmb4SO32PqxZNpMVieudjqKyFUW7V9EWKUwWtZqCUBU7Si2HN+i1T1FRMRrVFC7Qc6UeaX5DnW38G4YjLp9iE+x1rIoYRF9G/bFGANAVGgUyanJHDt/zOF0IiJSVqigdoOcKfNK8x3qqsFVaRfWjqUHtWKi+I4dJ3dw7Pwx+jXsd2Vbm9ptAC3wIiIi3qOC2g32JO8ptVPm5RYbEcuqxFWkZ6Y7HUUEuLr/dI6o2lGAZvoQERHvUUHtBvGn4mlSo0mpnDIvt9j6sVxIv8Cm45ucjiICZM8/3bBaQyKrRV7ZFlIxhNCKobpDLSIiXlO6K0Av2ZO8p1R398jRo34PAFYlrnI4iUj27Drf7f/uqu4eOaJCo1RQi4iI16igLqayMGVejnqV61GrQi02HtvodBQRNhzbwOlLp6/q7pEjqnYU25O2k5mV6UAyEREpazxWUBtjJhtjThhjtuba9jdjzE5jzGZjzJfGmGq5nvuNMSbeGLPLGDPQU7ncrSxMmZfDGEN0WLQKavEJOf2n+zTs86PnompHcSnjEntP7fV2LBERKYM8eYd6CjDomm0LgDbW2rbAbuA3AMaYVsAooLXrmLeMMf4ezOY2OTN8lIU71ADRodFsPbFVAxPFcYsSFtE6pDVhlcJ+9FxUqAYmioiI93isoLbWLgFSrtk231qb4fp0FRDu+ngY8Im19rK1NgGIBzp7Kps7XZmDupQuO36t6LBoLmdeZlfyLqejSBmWlpnG0oNL8+zuAdAqpBUGo37UIiLiFU72oR4DzHV9XA84lOu5RNc2n5czZV7dynWdjuIV0WHRAOr2IY5anbiai+kX8y2oKwRWoEmNJiqoRUTEKxwpqI0xzwMZwH+LcOwjxpi1xpi1SUlJ7g9XSGVlyrwczWs1JzggmA1HNzgdRcqwRQmL8DN+9I7sne8+UaFR6vIhIiJe4fUq0BjzADAUuNtaa12bDwMRuXYLd237EWvtO9baGGttTEhIiEezFkRZmTIvR4BfAFG1o9h4fKPTUaQMW5iwkA51OlAtuFq++0TVjiI+JZ6L6Re9F0xERMokrxbUxphBwLPAbdba3D/lZgGjjDHljDENgabAGm9mK4qyNGVebjkzffzv9yHxFZcyLvHaytc4e/ms01E85kLaBVYlrqJvZN7dPXJE1Y7CYtmetN1LyUREpKzy5LR504CVQHNjTKIx5iHgTaAysMAYs9EYMwHAWrsNmA5sB+YBj1trfX4C2Zwp88piQZ2SmkLi2USno8g1Xlj0Ak/Pf5rJGyY7HcVjlh1cRnpWOv0a/XhBl9w004eIiHhLgKdObK0dncfm966z/5+AP3kqjyfkTJlXlrp8wNUDEyOqRlx/Z/GalYdW8o+V/wDgq51f8VTXp5wN5CGLEhYR6BdIj4ge192vcfXGBAcEa2CiiIh4XNkYSechZW3KvBxtQ9tiMGw4poGJviI1PZUHZj5ARNUInuj8BEsPLiX5YrLTsTxi0f5FdIvoRsWgitfdz9/Pn1YhrVRQi4iIx6mgLoayNmVejkpBlWhas6mmzvMhLyx+gd3Ju3nvtve4r919ZNksvtn9jdOx3O5U6inWHVl3w/7TOaJqa6YPERHxPBXUxVDWpszLTUuQ+47lB5fz2srXGNdxHP0b9adDnQ6EVwln5q6ZTkdzu+8PfI/F5jv/9LWiakdx/MJxki44P8WmiIiUXmWvEnSjPcl7ytyAxBzRodEknE7g9KXTTkcp0y6mX+TBmQ9Sv2p9Xr35VQCMMQxrPox58fNK3ZRxixIWUSGwAl3CuxRo/5yBiVtPbPVkLBERKeNUUBdRzpR5ZW1AYo6cgYmbj292NkgZ99tFv2VPyh4mD5tM5XKVr2wf1nwYqRmpxO2LczCd+y1MWEjP+j0J8g8q0P5RtV0zfagftYiIeJAK6iIqq1Pm5Whfpz2gJcidtOzgMv656p88FvPYj7pA9IrsRdVyVZm5s/R0+zh2/hjbk7YXuLsHQFilMGqWr6l+1CIi4lEqqIuorE6ZlyOsUhihFUM104dDcrp6RFaL5K83//VHzwf5BzG46WC+3v01mVk+P6V7gSxOWAxAv4bXn386N2NM9hLkukMtIiIepIK6iMrqlHm5aWCic55b+BzxKfFMHjaZSkGV8txneIvhJF1MYmXiSi+n84yFCQupFlztSnejgoqqHcXWE1vJslmeCSYiImWeCuoi2pO8h/IB5cvclHm5RYdFs+3ENtIy05yOUqYsPbCUN1a/weOdHqd3ZO989xvUZBBB/kF8tfMrr2XzpEUJi+gd2Rt/P/9CHRdVO4oL6RfYf3q/Z4KJiEiZp4K6iPak7KFxjcZlcsq8HNFh0aRnpbMjaYfTUcqMC2kXeHDmgzSs3pC/9P/LdfetUq4KfRv25audX2Gt9VJCz0g4lUDC6YRCdffIoSXIRUTE08puNVhM8SnxZXZAYo72YRqY6G3PLXyOvaf2Mvm2/Lt65Da8+XD2ntrL9qTtXkjnOYsSFgEUakBijtYhrQHN9CEiIp6jgroIyvqUeTma1GhChcAKGpjoJd/v/5431rzBzzr/jF6RvQp0zK3NbwUo8d0+Fu1fRFilMFrWalnoYyuXq0zDag1VUIuIiMeooC6Csj5lXg5/P3/ahrbVHWovuJB2gTGzxtC4emP+3O/PBT6ubuW6dKnXpUSvmmitZVHCIvo27IsxpkjnaFO7jbp8iIiIx6igLoKcKfPK8gwfOaJDs2f6KOl9dH3dr+N+TcKpBN4f9j4VgyoW6thhzYfxw5EfOHz2sIfSedaOkzs4dv4YfSML390jR1TtKHYn7+ZyxmU3JhMREcmmgroI9iSX7Tmoc4sOi+bM5TMcOHPA6Sil1nf7v+PNH97kiS5P0LNBz0IfP7zFcABm7Zrl5mTekdN/ul+jwg9IzBEVGkWmzWTnyZ3uiiUiInKFCuoiiE+JL/NT5uXImRNY3T4843zaeR6c+SBNajThlX6vFOkcLWq1oFnNZny16yv3hvOShQkLaVitIZHVIot8Di1BLiIinqSCugg0Zd7/RIVG4Wf8VFB7yK8W/IoDpw/w/rD3qRBYoUjnMMYwrPkwFics5sylM25O6FmZWZl8t/+7Is3ukVuzms0I9AtUP2oREfEIVYRFoCnz/qdCYAWa12yumT48YFHCI
t5a+xZPdX2K2PqxxTrX8BbDSc9KZ278XDel844NxzZw+tLpIs0/nVugfyAtQ1rqDrWIiHiECupCypkyTwX1/2gJcvc7d/kcD816iKY1mvJy35eLfb4u9bpQu2LtEjd9Xk7/6T4N+xT7XFG1o1RQi4iIR6igLqRDZw+RlpmmAYm5RIdFc/DMQVJSU5yOUmo8u+DZYnf1yM3fz5/bmt3GnD1zStRMF4sSFtEqpBVhlcKKfa6o2lEknk3kVOopNyQTERH5HxXUhRRWKYylDy5laLOhTkfxGTkDEzcd2+RskFJi4b6FTFg3gZ93/Tk96vdw23mHtxjOubRzfLf/O7ed05PSMtNYenBpsbt75MhZgnzria1uOZ+IiEgOFdSFFBwQTGz9WOpUruN0FJ/hazN9HDl3hIjXI3jk60c4ceGE03EKJaerR7OazdzS1SO3fo36UTGwYolZ5GV14moupl8s9oDEHJrpQ0REPEUFtRRb7Yq1qVu5rs8MTJyzZw6JZxOZvGEyTf/dlL+v+HuJ6ebwzIJnOHT2EFOGTaF8YHm3njs4IJhBTQYxc9dMsmyWW8/tCYsSFuFn/OjVoGDLrN9IeJVwqparqpk+RETE7VRQi1v40sDEuH1x1K1cl22PbaNn/Z48s+AZWr/Vmpk7Z/r0io4L9i5g4rqJ/KLrL+gW0c0jbQxrPowj546w9shaj5zfnRYmLKRDnQ5UL1/dLeczxhAVqoGJIiLifiqoxS2iQ6PZcXIHlzIuOZojy2axMGEh/Rv1p3mt5nxz1zfMu3seQf5BDP90ODd/dLNP3qE8e/ksD816iOY1m/OHPn/wWDtDmg3B3/gzc6dvd/u4kHaBVYmrirXceF6iakex9cRWn/7FSkRESh4V1OIW0WHRZGRlsD1pu6M5Nh/fzMmLJ+nfsP+VbQObDGTTuE38+5Z/s/7oeqInRvPY7Mc4efGkg0mv9sv5v+TwucNMGe7+rh651Shfg5sa3OTzqyYuP7Sc9Kx0t/WfztGmdhvOXD5D4tlEt55XRETKNhXU4hbt67QHnB+YGLcvDsgegJdboH8g/9f5/4h/Ip7HOz3OO+veoem/m/LPVf8kPTPdiahXzN87n0nrJ/HLbr+ka3hXj7c3vMVwtidtZ0/yHo+3VVQL9y0k0C+w2AvaXEsDE0VExBNUUItbNKreiEpBldhw1NmBiXH74mgV0oq6levm+XyN8jV445Y32Dx+M13qdeHn3/6cqLejmLNnjpeTZjtz6QwPz3qYlrVa8lKfl7zS5rDmwwB8eraPRfsX0TW8KxWDKrr1vG1qtwHwyW4/IiJScqmgFrfwM360C23HxuMbHctwOeMySw4suaq7R35ahbRi7t1z+Wb0N1gsQ6YO4Zb/3sKOpB1eSJotMyuTX3z7iytdPYIDgr3SboNqDYgOi/bZVRNPpZ5i3ZF1bpt/Orfq5asTXiVcd6hFRMStVFCL20SHRbPp2CbHpmRbmbiS1IxU+je6cUEN2bM+DGk2hC3jt/DagNdYeWglUW9H8cTcJ9y+6mNmVibbTmzjw00f8sTcJ+gxuQdV/lKFyRsn80z3Z+hcr7Nb27uR4c2Hs+LQCp+cp/v7A99jsW7vP51DS5CLiIi7qaAWt4kOi+Zc2jkSTiU40n7cvjj8jT+9Igs3b3GQfxA/7/Zz9vxsD2M7jOU/P/yHpv9uyptr3iQjK6PQOXIXz0/OffJK8dzm7Tbc/9X9vLfhPQyGsR3GMvX2qW5fwKUghrUYhsXy9a6vvd72jSxKWESFwAp0Ce/ikfNH1Y5iR9IOx/vOi4hI6RHgdAApPdqH/W9gYuMajb3efty+OLqEd6FKuSpFOj6kYghvD32b8Z3G8/Nvf87P5v6Mt9e+zesDX2dA4wF5HpOZlcnOkztZd3Qd646sY93RdWw4toGL6RcBqBBYgfZh7RnbYSwd63SkY92ONK/ZHH8//yK/TndoF9qOBlUbMHPXTB7q8JCjWa61MGEhPev3JMg/yCPnjwqNIj0rnd3Ju2ldu7VH2hARkbJFBbW4TevarfE3/mw8tpE7Wt3h1bZPXzrND0d+4Lc9f1vsc7UNbUvcvXHM3DWTX87/JQM/HsjQZkN5tf+rZNmsGxbPD7d/mI51OxJTN8Yniue8GGMY1nwY76x/hwtpF9w++K+ojp0/xvak7dzf7n6PtZF7pg8V1CIi4g4qqMVtggOCaRnS0pElyL/b/x1ZNqvA/advxBjD8BbDuaXJLfxr9b94ecnLtHqr1ZXnry2eO9bpSItaLXyyeM7P8BbDeWPNG8zfO5+ftPyJ03EAWJywGMBj/aeB7Otk/NlyfAuj2ozyWDsiIlJ2qKAWt4oOi75SFHlT3L44KgZWdHu/23IB5Xi2x7Pc3+5+pmycQp3KdUpk8ZyXng16Uj24Ol/t+spnCuqFCQupFlztSvchTygXUI7mtZprYKKIiLiNCmpxq+jQaD7e/DFJF5IIqRjitXbj9sXRK7KXx/rdhlYK5Vexv/LIuZ0S4BfA0GZD+Wb3N2RkZRDg5/y3g0UJi+gd2dvjv6xE1Y5izeE1Hm1DRETKDs3yIW4VHRYNwKbjm7zW5qEzh9iVvKtA80/L1Ya3GE5KagrLDi5zOgoJpxJIOJ1A30jPdffIEVU7ioTTCZy7fM7jbYmISOmnglrcKqeg9uYS5AsTFgK4rf90WTKg8QDK+ZfziUVeFiUsAn68bLwnRIVmD0zclrTN422JiEjpp4Ja3KpmhZpEVInw6sDEuH1x1K5Y+8qy0lJwlYIqcXPjm5m5aybWWkezLNq/iLBKYbSs1dLjbWkJchERcScV1OJ20WHRXrtDba0lbl8c/Rv1xxjjlTZLm+HNh7P/9H42H9/sWAZrLYsSFtG3YV+vXMfIapFUDKyogYkiIuIWKqjF7aLDotl5ciep6akeb2vria0cv3Bc/aeLYWizoRiMo90+dpzcwbHzx7zSfxrAz/jRpnYbFdQiIuIWKqjF7aLDosmyWWw9sdXjbcXtiwO80++2tAqtFEr3iO7M3DXTsQw5/ac9Of/0taJqR7Hl+BbHu7qIiEjJp4Ja3C73EuSeFpcQR7Oazahftb7H2yrNhjUfxoZjGzhw+oAj7S9MWEjDag1pWL2h19qMCo0iOTWZY+ePea1NEREpnVRQi9tFVoukSrkqHi+o0zLT+H7/9+ru4QbDWwwHYNauWV5vOzMrk+/2f+fVu9Nw9RLkIiIixXHDgtoY08MYU9H18T3GmNeMMQ08H01KKmMM0WHRHp/pY3Xiai6kX9B0eW7QtGZTWtZqyVe7vvJ62xuPbeT0pdPeL6hdU+dppg8RESmugtyhfhu4aIxpBzwN7AU+9GgqKfGiQ6PZfHwzmVmZHmsjbl8cfsaP3pG9PdZGWTK8xXC+3/89p1JPebXdnHnEvV1Q16pQi7BKYbpDLSIixVaQgjrDZo/aGQa8aa39D1DZs7GkpIsOi+ZC+gX2ntrrsTbiEuKIqRtD9fLVPdZGWTK8xXAybSaz98z2aruLEhbRKqQVYZXCvNouZHf78MbgWRERKd0KUlCfM8b8BrgHmG2M
8QMCb3SQMWayMeaEMWZrrm01jDELjDF7XP9Wd203xpg3jDHxxpjNxpgORX1B4hva1/HswMSzl8+yOnG1+k+7UUzdGOpUquPV6fPSMtNYenCp16bLu1ZU7Si2JW3z6F9SRESk9CtIQf1T4DLwkLX2GBAO/K0Ax00BBl2z7dfAQmttU2Ch63OAW4CmrscjZHczkRKsVUgrAv0CPVZQf7//ezJtpvpPu5Gf8WNY82HMi5/HpYxLXmlzdeJqLqZfdGzaw6jQKC5lXPLoX1JERKT0K0hB/XNr7WvW2qUA1tqDQOsbHWStXQKkXLN5GPCB6+MPgOG5tn9os60Cqhlj6hQgm/ioIP8gWoW08tjAxLh9cZQPKE+3iG4eOX9ZNazFMC6kX2DhvoVeaW9RwiIMhl4NenmlvWtdmelDAxNFRKQYClJQ35zHtluK2F6otfao6+NjQKjr43rAoVz7Jbq2SQnmySXI4xLi6NmgJ8EBwR45f1nVJ7IPlYMqe22Rl4UJC+lQp4Nj/eBbhrTEYDQwUUREiiXfgtoYM94YswVo7urXnPNIADYXt2HXQMdCL1FmjHnEGLPWGLM2KSmpuDHEg6LDojl2/pjbF844cu4I25O2q/+0B5QLKMctTW9h1q5ZZNksj7Z1Ie0CqxJX0a+hc6tcVgisQJMaTVRQi4hIsVzvDvVU4FZgluvfnEdHa+09RWzveE5XDte/J1zbDwMRufYLd237EWvtO9baGGttTEhISBFjiDfkrJi46dgmt543pzuC+k97xvDmwzl+4TirE1d7tJ3lh5aTnpXu9enyrhUVGqUuH15y5tIZfv/d7zlx4cSNdxYRKUHyLaittWestfuttaOBavyvoI7I75gCmAXc7/r4fmBmru33uWb76AqcydU1REqodmHtAPfP9BGXEEfN8jWvnF/ca3DTwQT6BXp8to+F+xYS6BdIbP1Yj7ZzI1G1o4hPiedi+kVHc5QF/17zb176/iUGfTyIM5fOOB1HRMRtCrJS4hPAf4HarsfHxpifFeC4acBKsruMJBpjHgL+AtxsjNkD9Hd9DjAH2AfEA5OAx4rwWsTHVAuuRmS1SDYe3+i2c1pridsXR79G/fAzBRkCIIVVNbgqvSN7e3zVxEX7F9E1vCsVgyp6tJ0biaodhcWyPWm7ozlKu8ysTCatn3Sli81tn9xGanqq07FERNyiIBXJw0AXa+2L1toXga7A2BsdZK0dba2tY60NtNaGW2vfs9YmW2v7WWubWmv7W2tTXPtaa+3j1trG1tooa+3a4r0s8RXRYdFsOOq+mT52ntzJkXNH1H/aw4a3GM7u5N3sPLnTI+ffemIr64+ud7y7B2gJcm+ZFz+Pg2cO8krfV/joJx+x9MBSfvrZT8nIynA6mohIsRWkoDZA7lUPMl3bRG4oOjSa3cm7uZB2wS3ni9sXB6j/tKfd1vw2ALd2+7DWsuTAEm6dditRb0cRHBDMiFYj3Hb+ompcvTHlA8prYKKHTVg3gdCKoQxrMYxRbUbxn8H/4evdX/PQrIc8PgBWRMTTClJQvw+sNsb83hjzErAKeM+zsaS0aF+nPRbrtmIlLiGORtUb0bB6Q7ecT/IWXiWcmLoxbpk+LyMrg+nbptPl3S70mtKL1Ymrean3S+x/cj+ta99wSnuP8/fzp1VIKy1B7kEHzxxkzp45PNT+IYL8gwAY32k8f+j9Bz7c9CFPf/s02RM/iYiUTAE32sFa+5ox5jsgluxp7h601npmtQ4pdaLDooHsgYldw7sW61wZWRksTljM6Daj3ZBMbmRY82G8sPgFjp47Sp3KhV9n6XzaeSZvmMzrq15n/+n9NK3RlAlDJnBfu/soH1jeA4mLLio0innx85yOUWq9u/5drLWM7Xh1b8Hf3vRbklOT+efqf1KzQk1+e9NvHUooIlI8hRnVZa75V+SGIqpEUD24ultm+vjh8A+cSzun7h5eMrzFcABm7ZpVqOOOnjvKcwufI+L1CJ6c9yT1Ktfjq59+xc7/28mjMY/6XDEN2QMTj50/xsmLJ52OUuqkZ6bz7vp3GdRkEJHVIq96zhjDawNf49629/LC4hd4+4e3nQkpIlJMBZnl40WylwmvDtQC3jfG6DaCFIgxJntgohuWII/bF4fB0KdhHzckkxtpHdKaxtUbF7jbx/ak7Tw08yEi/xXJX5b9hX4N+7FizAqWjVnGsBbDfHpWFi1B7jnf7P6Go+ePMi5mXJ7P+xk/3rvtPW5tdiuPz3mcT7Z+4uWEIiLFV5CfcHcDnay1v7fW/o7sWT7u9WwsKU2iw6LZfHxzsUfzxyXE0b5Oe2pVqOWmZHI9xhiGNR/GwoSFnLt8Ls99rLUsTljMkKlDaP1Wa6ZtncbYDmPZ/bPdfDbyM7pFdPNy6qK5MtOHBia63YR1E6hXuR6Dmw7Od59A/0A+vfNTejboyb1f3qvuNyJS4hSkoD4CBOf6vBz5rGIokpfosGguZVxiT/KeIp/jfNp5Vh5aqenyvGx4i+GkZab9qMDJyMpg2pZpxEyKoe+Hffnh8A/8ofcfOPjzg7w5+E2a1GjiUOKiCa0YSq0KtXSH2s32puxl/t75jO0wlgC/6w/ZKR9YnlmjZhFVO4rbP72dFYdWeCmliEjxFaSgPgNsM8ZMMca8D2wFThtj3jDGvOHZeFIa5CxBXpx+1EsPLCU9K139p72se0R3alWodWWRl3OXz/HPVf+k8RuNueuLu7iQdoF3hr7DgacO8EKvF0rsXw+MMbSp3UZ3qN1s0vpJ+Bt/Hu7wcIH2rxpclXn3zCO8SjhDpg5h8/HNHk4oIuIeBSmovwSeAxYD3wHPk71k+DrXQ+S6WtRqQZB/ULEK6rh9cZTzL+f4MtVljb+fP7c2u5XZu2fz67hfE/F6BD//9uc0qNqAmaNmsv3x7YztONYnBxoWVlTtKLae2Ko5kd3kcsZlJm+YzNBmQ6lXpV6Bj6tdsTYL7l1AxcCKDPx4IHtT9nowpYiIe9ywoLbWfpDzAGYBG67ZJnJdgf6BtKndplhLkMclxNGjfo9SUbiVNMNbDOfM5TP8bcXfGNB4AKseWsWSB5dwW/PbfHqgYWFF1Y7iQvoF9p/e73SUUuHLnV+SdDEp38GI19OgWgPm3zuftMw0Bnw8gKPnjnogoYiI+xRklo/vjDFVjDE1gPXAJGPMa56PJqVJdGj2EuRFWbzh+PnjbD6+Wf2nHTKk6RDeH/Y+u/9vN9NHTKdLeBenI3mEliB3r4nrJhJZLZIBjQcU6fhWIa2Ye/dcjp8/zsCPB3Iq9ZSbE4qIuE9Bbi9VtdaeBW4HPrTWdgFU2UihRIdFk3QxiaPnC3+naVHCIkDLjTvF38+fB6IfoHGNxk5H8ajWIdmrNqofdfHtPLmT7/Z/xyMdHinWXzE61+vMV6O+YlfyLoZOG8qFtAtuTCki4j4F+U4XYIypA4wEvvFwHiml2tcp+sDEuH1xVAuuRoc6HdycSuR/KperTMNqDbUEuRtMXDuRAL8AxrQfU+xz9W/Un6m3T2VV4irunHEnaZlpbkgoIuJeBSmo/wB8C8Rba38
wxjQCij7/mZRJbUPbAoUvqK21LNi3gL4N++Lv5++BZCL/ExUapTvUxZSansoHmz7g9pa3E1op1C3nvKPVHUwcOpF58fO4/6v7yczKdMt5RUTcpSCDEmdYa9taax9zfb7PWnuH56NJaVKlXBUaV29c6II6PiWeQ2cPqf+0eEVU7Sh2ndzF5YzLTkcpsWZsn8GpS6d4tOOjbj3vwx0e5q/9/8onWz/hiblPFGk8hoiIp1x/pn3AGBMCjAUic+9vrS3+3/KkTCnKEuRx++IA9Z8W74iqHUWmzWTnyZ20C2vndJwSacLaCTSr2Yw+kX3cfu5nezzLyYsn+duKv1GzQk3+0OcPbm9DRKQoblhQkz3n9FIgDtDf2aTIosOi+XzH55y7fI7K5SoX6Ji4hDjqV61f4lbek5Ip9xLkKqgLb/PxzaxMXMk/BvwDY4xH2vhr/7+SkprCH5f8kZrla/Jk1yc90o6ISGEUpKCuYK39lceTSKmXs2Li5uOb6VG/xw33z8zKZFHCIm5vcbvHfjiL5Na0RlOC/IM0dV4RTVw7kXL+5bi/3f0ea8MYw4ShEzh16RRPffsU1ctX575293msPRGRgijIoMRvjDGDPZ5ESr3osGig4AMT1x9dz+lLp9XdQ7wm0D+QlrVaamBiEZxPO89Hmz9iROsR1KxQ06NtBfgFMPX2qfRr2I8xM8cwa9csj7YnInIjBSmonyS7qE41xpw1xpwzxpz1dDApfepWrkutCrUKXFDn9J/u27CvB1OJXE0zfRTNJ1s/4VzaOcZ1LPzKiEVRLqAcX/70SzrU6cDIGSNZcWiFV9oVEclLQWb5qGyt9bPWlrfWVnF9XsUb4aR0McYUamBiXEIcbUPbum3qLZGCaBPShsSziVqZr5AmrJ1A65DWdI/o7rU2K5erzJy75xBRNYLhnwzXsvEi4piCLD1+U14Pb4ST0ic6NJqtJ7aSnpl+3f0upl9k2cFlmi5PvC5nYKIWeCm4tUfWsu7oOsbFjPP6eIdaFWrx9eivSc9K59Zpt3L2sv6AKiLeV5AuH8/kerwAfA383oOZpBRrX6c9lzMvsyt513X3W35wOWmZaeo/LV4XVft/M31IwUxcO5EKgRW4t+29jrTfolYLZoyYwY6kHYz+fLQWfhERrytIl49bcz1uBtoA+luoFElBBybG7Ysj0C+Qng16ej6USC7hVcKpWq6q7lAX0JlLZ5i6dSqjWo+ianBVx3L0b9SfNwe/yZw9c/jl/F86lkNEyqaC3KG+ViLQ0t1BpGxoVrMZwQHBNy6oE+LoFtGNSkGVvBNMxMUYo4GJhfDx5o+5mH6RcTHeGYx4PeNixvFklyf55+p/MnHtRKfjiEgZUpCVEv8N5Kzx6gdEA+s9mElKsQC/AKJqR123oD558SQbjm7gpd4veS+YSC5RtaOYumUq1lrNgX4d1lomrptIhzodiKkb43QcAP4x4B/sSdnD43Mep3GNxuo2JiJeUZA71GuBda7HSuBX1tp7PJpKSrWcmT6stXk+vzhhMRarH4TimKjaUZy5fIbEs4lOR/FpKxNXsuXEFh7t+KjP/OLh7+fPtDum0TKkJSNmjGDXyeuP1xARcYeC9KH+IOcBzAHOeT6WlGbtw9qTkpqSb7ESty+OykGV6VSvk5eTiWTLvQS55G/C2glUDqrM6DajnY5ylSrlqvD16K8J9Atk6LShJF9MdjqSiJRyBZk27ztjTBVjTA2yu3pMMsa87vloUlrdaGBiXEIcfRr2IcDvhj2SRDyiTe02AFqC/DpSUlOYvm0697S9h8rlKjsd50ciq0Xy1aivOHjmIHdMv4O0zDSnI4lIKVaQLh9VrbVngduBD621XYB+no0lpVlUaBQGk2dBve/UPvad2qf5p8VR1YKrEVElQneor+ODjR9wOfMyj3Z81Oko+eoe0Z3Jt03m+wPfM/6b8fl2MxMRKa6CFNQBxpg6wEjgGw/nkTKgUlAlmtZsysbjG3/03MJ9CwHUf1ocp5k+8pczGLFreFfahbVzOs513d32bn7b87dM3jiZf6z8h9NxRKSUKkhB/QfgWyDeWvuDMaYRsMezsaS0iw6LZsPRHy9BHpcQR93KdWlRq4UDqUT+J6p2FDuSdtxwVc+y6PsD37MreRfjOjo/VV5BvNTnJUa0GsGzC55l1q5ZTscRkVKoIIMSZ1hr21prH3N9vs9ae4fno0lpFh0aTcLpBE5fOn1lW5bNYuG+hfRv1N9nZgyQsiuqdhTpWensTt7tdBSfM2HtBKoFV2Nk65FORykQP+PHlOFT6Fi3I3d9ftcN58EXESmsoizsIlJs7eu0B2Dz8c1Xtm06tonk1GT1nxafcGVgorp9XOXEhRN8seML7m93P+UDyzsdp8AqBFZg1qhZVC9fndum3cax88ecjiQipYgKanFEXjN9xO2LA6BfI415Fee1qNUCf+OvJciv8f6G90nPSvfpwYj5qVO5DrNGzSI5NZlhnwwjNT3V6UgiUkqooBZHhFUKI7Ri6NUFdUIcrUJaUbdyXeeCibiUCyhH81rNdYc6lyybxcR1E+nVoBctQ1o6HadI2tdpz39v/y8/HP6BB2c+qJk/RMQtCjIP9W9zfVzOs3GkLIkOi75SUF/KuMTSA0vV3UN8SlTtKM1FncuCvQtIOJ1QIu9O5za8xXD+0v8vfLrtU176/iWn44hIKZBvQW2M+ZUxphtwZ67NKz0fScqK6LBotp7YSlpmGisPrSQ1I1XT5YlPiaodRcLpBM5d1gKxABPXTaRWhVrc3vJ2p6MU2zPdn+HB6Ad56fuXmLZlmtNxRKSEu94d6p3ACKCRMWapMWYSUNMY09w70aS0ax/WnvSsdHYk7SBuXxz+xp9ekb2cjiVyRc4S5OpHDYfPHmbWrlmMiR5DuYCS/8dKYwwThk7gpgY38eDMB1mVuMrpSCJSgl2voD4NPAfEA72Bf7m2/9oYs8KzsaQsyD0wMS4hji7hXahSroqzoURy6VinIwBrDq9xOInz3tvwHpk2k7EdxzodxW2C/IP4fOTnhFcJZ9gnwzhw+oDTkUSkhLpeQT0QmA00Bl4DugAXrLUPWmu7eyOclG5NajShQmAFFu9fzNoja9V/WnxOvSr1qF+1PisTy3Zvt4ysDCatn8TNjW6mSY0mTsdxq1oVavHNXd9wOeMyt067Vd17RKRI8i2orbXPWWv7AfuBjwB/IMQYs8wY87WX8kkp5u/nT9vQtkzbOo0sm6X+0+KTukd0Z8Whsv1Hubl75pJ4NpFxMSVjZcTCalGrBTNGzGB70nZGfz6azKxMpyOJSAlTkGnzvrXWrrXWvgMkWmtjgQc9nEvKiOjQaNIy06gYWJEu4V2cjiPyI93Cu3Ho7CESzyY6HcUxE9ZNIKxSGLc2u9XpKB5zc+Ob+fct/2b2ntk8u+BZp+OISAlTkKXHc39necC17aSnAknZkrNiYq/IXgT5BzmcRuTHukdk93BbeahsdvvYf3o/c/fM5eH2DxPoH+h0HI8a32k8T3R+gtdWvcakdZOcjiMiJUihFnax1m7yVBApm9qHZRfU6j
8tvqpdaDvKB5Qvs90+3l3/LsaYUjUY8Xr+MfAfDGoyiMfmPFZmr7mIFJ5WShRHxdSN4f1h75eZH9ZS8gT6B9KpXqcyOTAxPTOdd9e/yy1NbqF+1fpOx/GKAL8APrnjEyKqRHDX53dx5tIZpyOJSAmgglocZYzhgegHqBRUyekoIvnqHt6d9UfXcynjktNRvGrmrpkcv3C81A5GzE/V4KpMvWNq9kDM2eO0PLmI3JAjBbUx5ufGmG3GmK3GmGnGmGBjTENjzGpjTLwx5lNjjDrUiohP6BbRjfSsdNYdWed0FK+auG4iEVUiuKXJLU5H8bqu4V15qfdLfLL1Ez7c9KHTcUTEx3m9oDbG1AOeAGKstW3Ino5vFPBX4HVrbRPgFPCQt7OJiOSlW3g3gDLVp3bXyV3E7YtjbIex+Pv5Ox3HEb+O/TW9GvTi8TmPE58S73QcEfFhTnX5CADKG2MCgArAUaAv8Jnr+Q+A4c5EExG5WkjFEJrUaMKKxLJTUL/0/UuUDyhfpsc3+Pv589FPPiLIP4jRn48mLTPN6Ugi4qO8XlBbaw8DfwcOkl1InwHWAaettRmu3RKBet7OJiKSn+4R3Vl5aGWZ6E+76dgmpm2dxpNdniSsUpjTcRwVUTWCd297l7VH1vLi4hedjiNSpmVmZrJr1y6nY+TJiS4f1YFhQEOgLlARGFSI4x8xxqw1xqxNSkryUEoRkat1C+/G8QvHSTid4HQUj3t+0fNUC67Gsz20wAnA7S1v55EOj/Dq8ldZuG+h03FEyqSDBw8yceJEPvnkE44fP+50nB9xostHfyDBWptkrU0HvgB6ANVcXUAAwoHDeR1srX3HWhtjrY0JCQnxTmIRKfPKygIvyw8uz14tsPuzVC9f3ek4PuO1ga/RvFZz7v3yXk5e1NpmIt6SmprKrFmzeP/990lLS2P06NGEhoY6HetHnCioDwJdjTEVjDEG6AdsBxYDd7r2uR+Y6UA2EZE8tQ5pTeWgyqV6YKK1lt8s/A2hFUN5ossTTsfxKRWDKjLtjmkkpybz0KyHykTXHxEnWWvZvHkzb775Jhs3bqRbt2489thjNGvWzOloeXKiD/Vqsgcfrge2uDK8A/wK+IUxJh6oCbzn7WwiIvnx9/OnS3iXUj0wcV78PJYeXMoLN71AxaCKTsfxOdFh0fy1/1+ZtWsWb6992+k4IqVWcnIyH330EV9++SXVq1fnkUceYcCAAQQF+e6MyqYk/5YdExNj165d63QMESkjfrf4d7y89GXO/PpMqVuMKMtm0fGdjpy5dIad/7eTIH/f/cHlJGstQ6YOYfH+xfww9gfa1G7jdCSRUiMjI4Ply5ezdOlSAgIC6NevHx07dsTPzzfWITTGrLPWxuT1nG8kFBEpAbpFdCPLZrHm8Bqno7jdjG0z2HhsIy/1fknF9HUYY3h/2PtULVeV0Z+PJjU91elIIqXC/v37mTBhAt999x0tWrTg8ccfp1OnTj5TTN9IyUgpIuIDuoZ3BUrfwMT0zHReWPwCrUNac1fUXU7H8XmhlUKZMnwKW09s5dkFmgmltDh16hSrV68mMTGRjIyMGx8gbnHx4kVmzpzJBx98QGZmJnfffTd33nknlStXdjpaoQTceBcREQGoFlyN1iGtS10/6ikbp7AnZQ8zR80ss6siFtagJoP4edef8/qq1xnQeAC3Nr/V6UhSTAcOHGDevHkA+Pn5ERoaSr169a48atasWWLulpYE1lo2bdrE/PnzuXz5MrGxsdx0000EBgY6Ha1I1IdaRKQQxs4ay+c7PufksyfxMyX/h2tqeipN/92U+lXrs3zMcrInX5KCuJxxma7vdeXQmUNsHr+ZupXrOh1Jiuns2bMcOXKExMREjhw5wuHDh0lLy14hMygoiLp1615VZFeuXFlfM0Vw8uRJvvnmGw4cOEBERARDhw6ldu3aTse6oev1odYdahGRQuge0Z13N7zL7uTdtKjVwuk4xfafH/7D4XOH+fj2j1UYFFK5gHJMu2MaHSZ24P6v7ufbe74tFb9klWVVqlShSpUqtGiR/bVtreXkyZMcPnyYw4cPc+TIEVauXElWVhYAlSpVuqrArlu3LsHBwU6+BJ+WkZHB0qVLWbZsGUFBQQwdOpQOHTqUiu89KqhFRAqhW0Q3AFYcWlHiC+ozl87w52V/ZkDjAfSO7O10nBKpRa0W/GvQv3jkm0f4x4p/8EyPZ5yOJG5kjCEkJISQkBCio6OB7KLw2LFjVwrsw4cPX7Ucds2aNa8qskNDQwkIULm1b98+Zs+eTUpKClFRUQwYMIBKlUrPbEm6wiIihdCsZjNqlK/BykMrGdN+jNNxiuUfK/9BSmoKr/R9xekoJdrDHR7m273f8tyi5+jTsA8xdfP8i7CUEgEBAYSHhxMeHn5lW2pq6pXi+siRI+zdu5fNmzcDUL58ecaMGUOtWrWciuyoCxcuMH/+fDZv3kyNGjW49957adSokdOx3E59qEVECmno1KEknE5g22PbnI5SZCcunKDRvxpxS9NbmDFihtNxSryU1BTaTWhH+YDyrH90fambp1wKx1rL2bNnSUxM5Ouvv6Zu3brce++9paJrQ0FZa9mwYQMLFiwgLS2N2NhYevbsWaLv1mseahERN+oW3o3tSds5lXrK6ShF9srSV0jNSOWPff7odJRSoUb5Gnz8k4+JT4nniblatr2sM8ZQtWpVWrduTb9+/UhISGDbtpL7C3hhpaWlMWPGDL7++mtCQ0MZN24cffr0KdHF9I2ooBYRKaTuEd0BWH14tcNJiubA6QO8vfZtHmj3QInvB+5LekX24vmez/P+xvf5dOunTscRH9GxY0fq1KnDt99+y+XLl52O43Fnz55lypQp7Nixg5tvvpn777+fkJAQp2N5nApqEZFC6lSvE37GjxWHSuZ81C99/xIAv+v9O4eTlD4v9nqRruFdefSbR9l/er/TccQH+Pn5MWTIEM6fP8/ixYudjuNRR44cYdKkSSQnJzN69Gi6d+9eZrq5qKAWESmkSkGVaBfajpWJJW/FxB1JO/hg0wc8FvMY9avWdzpOqRPoH8jU26eSZbO4+4u7ycjSinsC9erVo2PHjqxZs4Zjx445Hccjtm/fzvvvv4+/vz9jxoyhWbNmTkfyKhXUIiJF0C28G6sSV5GZlel0lEJ5YfELVAiswHM9n3M6SqnVsHpDJgydwIpDK3h5yctOxxEf0a9fP8qXL8/s2bMpyRNCXMtay5IlS5gxYwZ16tRh7NixhIaGOh3L61RQi4gUQfeI7pxPO8/WE1udjlJga4+s5fMdn/OLrr8gpGLp79PopLui7uLetvfyxyV/ZNnBZU7HER9Qvnx5br75ZhITE9m4caPTcdwiIyODL7/8ksWLF9O2bVvuu+8+Klas6HQsR6igFhEpgpyBiSWp28dzC5+jZvmaPN39aaejlAn/GfwfIqtFcvcXd5foGWHEfdq1a0f9+vVZsGABFy9edDpOsZw/f54pU6awZcsW+vbty/Dhw0v1LB43ooJaRKQIIqtFEloxtMQMTFycsJgF+xbwm9jfU
KVcFafjlAmVy1Vm2h3TOHLuCONmjytVf+aXojHGMHjwYC5dusTChQudjlNkx44dY9KkSZw4cYKRI0fSs2fPMjP4MD8qqEVEisAYQ/eI7iXiDrW1lt8s/A31KtfjsU6POR2nTOlcrzN/7PNHpm+bzvsb33c6jviA0NBQunTpwvr160lMTHQ6TqHt2rWLyZMnY63lwQcfpGXLlk5H8gkqqEVEiqhbeDfiU+I5ceGE01Gua9auWaw+vJrf9/495QPLOx2nzHmm+zP0iezDz+b+jMSzJa+AEvfr3bs3lStXZvbs2WRlZTkdp0CstSxfvpxPPvmEkJAQxo4dS506dZyO5TNUUIuIFNGVftSHfPcudWZWJs8vep5mNZvxQPQDTscpk/z9/Hnn1ne4mH6R/27+r9NxxAeUK1eOgQMHcuzYMX744Qen49xQZmYms2bNIi4ujtatW/PAAw9QuXJlp2P5FBXUIiJF1LFuRwL9An2628fULVPZlrSNP/b5IwF+ZXfAkNOa1GhC1/CuTNs6zeko4iNatWpFo0aNWLx4MefPn3c6Tr4uXrzIhx9+yMaNG+nVqxd33HEHgYGBTsfyOSqoRUSKKDggmA51OvjswMS0zDR+993vaB/Wnjtb3el0nDJvdJvRbDq+iR1JO5yOIj4gZ4BiRkYG8+fPdzpOnpKSknj33Xc5fPgwd9xxB7179y7zgw/zo4JaRKQYukd054cjP5Ceme50lB+ZtG4SCacTeKXfK/gZfbt32sjWI/EzfrpLLVfUrFmT7t27s2XLFhISEpyOc5X4+Hjee+890tLSeOCBB2jTpo3TkXyavsOKiBRDt/BuXMq4xMZjG52OcpULaRf445I/clODmxjYeKDTcQQIqxRGn8g+TNs6TVPoyRU9e/akWrVqzJkzh8xM51detdayevVqpk6dSrVq1Rg7dizh4eFOx/J5KqhFRIqhW0Q3AJ/r9vHG6jc4fuE4f+73Z/2J1oeMbjOa+JR41h5Z63QU8RGBgYHccsstnDx5kpUrnR2PkZmZyZw5c5g3bx7NmjVjzJgxVK1a1dFMJYUKahGRYgivEk79qvV9amDiqdRTvLriVYY2G3plJhLxDbe3vJ0g/yB1+5CrNGvWjObNm7NkyRLOnDnjSIbU1FT++9//snbtWnr06MFPf/pTgoKCHMlSEqmgFhEppm7h3XzqDvWry1/lzKUz/Knvn5yOIteoXr46tzS5hU+3fUpmlvN/3hffMWjQIKy1zJs3z+ttJyUl8d5773HgwAGGDRtG//799ZetQlJBLSJSTN0junPo7CGfWLTj6Lmj/Gv1vxgdNZq2oW2djiN5GN1mNEfOHWHpwaVORxEfUq1aNXr16sXOnTvZvXu3V9q01rJ27VreeecdUlNTue+++4iOjvZK26WNCmoRkWLqFp7dj9oXFnh5ecnLpGel84fef3A6iuTj1ua3UjGwIlO3THU6iviYbt26UatWLebOnUt6umdnDrp48SLTp09n9uzZNGjQgHHjxtGgQQOPtlmaqaAWESmm6LBoygeUd7zbx96Uvbyz/h0ebv8wjWs0djSL5K9CYAWGtRjGZ9s/Iy0zzek44kP8/f0ZPHgwp0+fZtmyZR5rJyEhgQkTJrB7924GDBjA3XffrZUPi0kFtYhIMQX6B9KpXifHByb+7rvfEegXyAu9XnA0h9zYXW3u4tSlU8zf65sLeohzGjZsSFRUFMuXLyc5Odmt587MzGThwoV8+OGHBAUF8fDDD9OtWzf1l3YDFdQiIm7QLbwb64+u51LGJUfa33J8C1O3TOVnnX9G3cp1HckgBXdz45upUb6GZvuQPN18880EBAQwd+5ct81ZnpKSwuTJk1m2bBkdOnTgkUceoU6dOm45t6igFhFxi+4R3UnPSmfdkXWOtP/8ouepUq4Kv4r9lSPtS+EE+QdxZ8s7mblzJhfSLjgdR3xM5cqV6dOnD3v37mXHjuItVW+tZdOmTUycOJGUlBRGjBjBrbfeqinx3EwFtYiIG3QN7wo4s8DL6sTVfL37a57p/gw1ytfwevtSNKOjRnMh/QJf7/7a6Sjigzp16kRYWBjz5s3j8uXLRTrHpUuX+OKLL/jqq6+oU6cO48aNo1WrVm5OKqCCWkTELWpXrE2TGk1Ykej9gvrlpS9To3wNnuz6pNfblqLrWb8ndSvXVbcPyZOfnx+DBw/m3LlzfP/994U+/tChQ0ycOJFt27bRp08f7rvvPq166EEqqEVE3KRbeDdWHlrptj6PBbHx2Ea+2f0NP+/6cyoFVfJau1J8/n7+jGo9irl75nIq9ZTTccQHRURE0L59e1atWsWJEycKdExWVhbff/8977//PgBjxozhpptuws9PJZ8n6d0VEXGT7hHdOX7hOAmnE7zW5itLX6FKuSr8X+f/81qb4j6jo0aTnpXOFzu+cDqK+Kj+/fsTHBzM7Nmzb/jL+pkzZ/jggw/47rvvaNOmDePGjSM8PNxLScs2FdQiIm7SPaI74L0FXnYk7eCz7Z/xf53+j2rB1bzSprhXxzodaVKjCVO3apEXyVuFChXo378/Bw8eZNOmTfnut23bNiZMmMCxY8f4yU9+wu233065cuW8mLRsU0EtIuImrUNaUzmostcGJv552Z8pH1iep7o+5ZX2xP2MMYxuM5rFCYs5eu6o03HER7Vv357w8HAWLFhAamrqVc+lpaUxc+ZMPvvsM2rWrMmjjz5K27ZtHUpadqmgFhFxE38/f7qEd/HKwMR9p/YxdctUxnUcR0jFEI+3J54zus1oLJbp26Y7HUV8lDGGIUOGkJqayqJFi65sP3LkCO+88w4bN26kZ8+ePPjgg9SooZl+nKCCWkTEjbqFd2Pz8c2cTzvv0Xb+uuyvBPgF8HT3pz3ajnhey5CWRIdFa7YPua6wsDA6derE2rVrOXz4MCtWrOC9994jPT2d+++/n759++Lv7+90zDJLBbWIiBt1j+hOls1izeE1Hmsj8Wwi7298nzHtx2hVxFJidJvRrD68mr0pe52OIj6sT58+VKpUiSlTprBgwQKaN2/OuHHjiIyMdDpamaeCWkTEjXIWePHkwMS/Lf8bFsuzPZ71WBviXaPajALgk62fOJxEfFlwcDCDBw8mKCiIoUOHMmLECMqXL+90LAECnA4gIlKaVAuuRquQVh7rR338/HHeWf8O97a9l8hqkR5pQ7yvftX69IjowbSt03j+puedjiM+rGXLlrRo0QJjjNNRJBfdoRYRcbPu4d1ZeWglWTbL7ed+fdXrpGWm8evYX7v93OKsu6LuYlvSNrYc3+J0FPFxKqZ9jwpqERE36xbRjVOXTrE7ebdbz5uSmsJ/fvgPI1uPpFnNZm49tzhvRKsR+Bt/DU4UKYFUUIuIuFnOAi/uno/6jdVvcD7tPM/FPufW84pvCKkYQv9G/Zm2dZpXl68XkeJTQS0i4mbNajajRvkabh2YePbyWf61+l8MbzGcqNAot51XfMvoNqPZf3o/qxJXOR1FRArBkYLaGFPNGPOZMWanMWaHMaabMaaGMWaBMWaP69/qTmQTESkuP+NH
1/Cubh2Y+PYPb3P60mme76kBa6XZT1r+hHL+5dTtQ6SEceoO9b+AedbaFkA7YAfwa2ChtbYpsND1uYhIidQ9vDvbk7ZzKvVUsc91Mf0i/1j5DwY1GURM3Rg3pBNfVaVcFYY2G8r0bdPJyMpwOo6IFJDXC2pjTFXgJuA9AGttmrX2NDAM+MC12wfAcG9nExFxl24R3QBYfXh1sc81ad0kki4m6e50GTG6zWiOXzjO4oTFTkcRkQJy4g51QyAJeN8Ys8EY864xpiIQaq096trnGBDqQDYREbfoXK8zfsav2AMTL2dc5tUVr9KrQS9i68e6KZ34ssFNB1M5qLK6fYiUIE4U1AFAB+Bta2174ALXdO+w2cOb8xzibIx5xBiz1hizNikpyeNhRUSKolJQJdqGtmVlYvEGJn6w6QOOnDvCb2/6rZuSia8rH1ien7T8CV/s+ILLGZedjiPiUzIu+2ZXKCcK6kQg0Vqb83fQz8gusI8bY+oAuP49kdfB1tp3rLUx1tqYkJAQrwQWESmK7uHdWZW4isyszCIdn56Zzp+X/Zku9brQr2E/N6cTXza6zWjOXD7D3Pi5TkcR8QmZaZks/fNS3mj8BhdOXHA6zo94vaC21h4DDhljmrs29QO2A7OA+13b7gdmejubiIg7dY/ozvm082w9sbVIx0/bOo39p/fzfM/ntTJaGdOvYT9CKoSo24cIcHDZQSZ2mMii5xYR3iUcm+V787QHONTuz4D/GmOCgH3Ag2QX99ONMQ8BB4CRDmUTEXGLnIGJKxNX0i6sXaGOzczK5JWlr9AutB1Dmw31RDzxYYH+gYxoNYLJGydz7vI5Kper7HQkEa9LTUllwa8WsOHdDVStX5VRs0bR/NbmNz7QAY5Mm2et3ejqttHWWjvcWnvKWptsre1nrW1qre1vrU1xIpuIiLs0rNaQ0IqhRRqY+MWOL9iVvEt3p8uw0VGjuZRxiZm79AdbKVustWz+eDNvtniTje9vpNsvu/HYtsd8tpgG5+5Qi4iUesYYukV0K/TARGstLy99mRa1WnB7y9s9lE58XfeI7kRUiWDa1mnc0/Yep+OIeEXynmRmj59NwsIE6nWpx70L7iWsXZjTsW5IS4+LiHhQ9/DuxKfEc+JCnuOs8/TN7m/YfHwzv4n9Df5+/h5MJ77Mz/gxqs0o5u+dz8mLJ52OI+JRGZcz+P6P3/N21Nsc+eEIg/8zmDHLx5SIYhpUUIuIeFT3iO4ArDxUsLvUOXenG1ZryOg2oz0ZTUqAu6LuIiMrg8+2f+Z0lFIvPTWdS2cuOR2jTNr//X4mtJvAdy9+R4thLXh85+N0eqwTfv4lp0xVlw8REQ/qWLcjgX6BrExcybAWw264/8KEhaw5vIaJQycS6B/ohYTiy9qFtqNFrRZM2zqNcTHjnI5TqsXPi2f6HdOp1aIW9TrXu/IIbRuKf5D+UuQJF09eZMEzC9g4ZSPVGlbj7rl302RQE6djFYkKahERDwoOCKZDnQ4FHpj48pKXqVe5Hve3u//GO0upZ4xhdJvR/P6735N4NpHwKuFORyq1QlqF0OcPfTi85jDxc+PZ9MEmAPyD/AlrH3ZVkV2jSQ2MnwYLF5W1lk0fbGL+L+dz+cxlevy6B71e6EVghZJ7E0EFtYiIh3UL78aEdRNIz0y/7l3npQeW8v2B7/nnwH9SLqCcFxOKLxvdZjS/++53fLr1U57u/rTTcUqtWs1rcdNvbwKyC76zh85yeM3hK48Nkzew5t9rAAiuFkzdTnWvKrIrhVVyMn6JcXLnSb4Z9w0Hvj9ARPcIhk4cSu02tZ2OVWwme5XvkikmJsauXbvW6RgiItc1Y9sMRn42kjUPr6FTvU757jfo40GsP7qe/U/tp0JgBS8mFF8X804MAGsf8dzPvPTMdF5Y/AKTN0xm5UMraVyjscfaKomyMrM4uePkVUX28c3HsZnZdVSViCr/K7C71KNux7oEVQpyOLXvyLiUwdJXlrLsL8sIqhhE/1f70+GhDiXqTr8xZp21Niav53SHWkTEw3IWeFlxaEW+BfXaI2v5du+3/KXfX1RMy4/cFXUXT89/mt3Ju2lWs5nbz59wKoG7vriLVYmrAPjvlv/yYq8X3d5OSebn70ftNrWp3aY27ce0ByD9YjrHNh67qsje8fkOAIyfIaRVCJF9I+n/l/4Eli+53RmKa9/CfcweP5uUPSlE3R3FgH8MoFJo6bqjX3KGT4qIlFDhVcKJqBJx3fmo/7T0T1QPrs74TuO9mExKip+2/ikGw7Qt7l+KfMa2GbSf2J7tSduZfud0YuvHMmP7DLe3UxoFVggkonsEXZ/qyh1T7+CJ+Cd4JukZ7p57N71+14sqEVVY88Yalv1lmdNRHXHhxAW+vPdLPur/EVi4Z/493P7x7aWumAYV1CIiXtE9onu+AxO3HN/CVzu/4okuT1ClXBUvJ5OSoF6VetzU4CambZ2Gu7pqpqanMu6bcYz8bCTNazVn46MbGdF6BCNajWDria3sSNrhlnbKmgq1KtBkUBN6vdiLu+fcTdRdUSz/63KS9yQ7Hc2rds3axZst3mTrp1vp+duejNs8jsY3l95uRCqoRUS8oFt4Nw6dPUTi2cQfPffKsleoFFSJJ7o84UAyKSlGtxnNruRdbDy2sdjn2nZiG53f7czEdRN5tvuzLHtwGQ2rNwTgjpZ3AOgutZvc/PebCSgXwNyfzXXbL0O+zFrL8leX88nwT6jRuAbjNo2j7x/7lvouLyqoRUS8IL8FXnYn72b6tuk83ulxapSv4UQ0KSHubHUnAX4BTN0ytcjnsNby7vp36TSpEycunGDe3fP4681/vWr2mXpV6qnbhxtVrlOZPn/sw95v917pX11aZVzOYNaYWcT9Ko7WI1rzwJIHCGkZ4nQsr1BBLSLiBe3C2hEcEPyjbh9/WfYXyvmX4xfdfuFQMikpalaoycDGA/lk2ydk2axCH3/m0hlGfz6asV+PpUf9Hmwat4mBTQbmua+6fbhXp8c6ERYdxryn5nH53GWn43jExZMX+ejmj9g4ZSO9fteLOz65o9Tflc5NBbWIiBcE+QfRqW6nqwYm7j+9n482f8TYDmOpXbHkz8Mqnje6zWgSzyay/ODyQh235vAa2k9sz2fbP+PP/f7Mt/d8S1ilsHz3V7cP9/IL8GPwW4M5d/gc3//he6fjuF3S9iQmdZ7EkR+OcMe0O+j9+94YU3Kmw3MHFdQiIl7SPaI764+u51LGJQBeXf4qBsMzPZ5xOJmUFMNaDKN8QHmmbS3YbB9ZNou/Lf8bPSb3IMtmseTBJfw69tf4mev/+K9XpR49InqooHajiG4RtH+4PateX8WJrSecjuM28fPiea/be6RfTOf+7+6nzag2TkdyhApqEREv6RbejfSsdNYdWceRc0eYvGEyD0Y/qOWkpcAqBVXi1ua3MmP7DNIz06+774kLJxj838E8G/csw5oPY8OjG6705S+Ika1HsvXEVnae3Fnc2OLS/y/9Ca4WzOzHZpf4AYrWWla/sZqpQ6ZSrWE1xv4wlvAuZfd7mQpqEREvyb3
Ayz9W/IOMrAx+Ffsrh1NJSTO6zWhOXjxJ3L64fPdZuG8h7Sa047v93/H2kLeZMWIG1ctXL1Q7V7p9bNNdanepULMC/f/Sn4NLD7L5o81OxymyzPRMZj82m3lPzqP5bc0Zs2wMVSOqOh3LUSqoRUS8pHbF2jSu3pivd3/NhHUTuLvt3TSq3sjpWFLC3NLkFqqWq5pnt4+MrAyeX/g8N390M9WDq7Nm7BrGxYwrUn9WdfvwjPZj2hPeNZz5v5xP6qlUp+MUWuqpVP57y39ZN2EdPX7Vg5Gfj9QS66igFhHxqu4R3Vl6cCmp6an8JvY3TseREqhcQDnuaHkHX+78ktT0/xVkB04foNeUXryy7BXGtB/DD2N/oG1o22K1NaLVCLac2KJuH25k/AxD3h5CanIqi367yOk4hZK8O5n3ur7HgSUHGDZlGP3/0h/jV7YGH+ZHBbWIiBfl9GG9s9WdtKjVwuE0UlKNjhrN+bTzzN4zG4AvdnxB9MRothzfwtTbp/Lube9SMahisdu5s9WdgLp9uFtYdBid/q8Ta99ey5G1R5yOUyAJixJ4t+u7pKakcv+i+4m+P9rpSD5FBbWIiBcNbjqY6LBoft/7905HkRKsT2QfQiuG8v7G93l89uPcMf0OmtRowoZHNzA6arTb2lG3D8/p84c+VAqtxOzxs8nKLPy84t607p11fDzwYyrXqczDqx+mfmx9pyP5HBXUIiJeVL9qfTY8uoFWIa2cjiIlmL+fPyNbj2TOnjm8tfYtnu72NMvHLKdxjcZub0vdPjwjuGowA/4xgCNrj7B+0nqn4+QpKyOLeU/N45tHv6HRzY14aOVDVG9UuMGtZYUKahERkRLosU6P0SOiB7Pvms3fB/ydIH/PDAy7o5Vm+/CUNqPbENknkoW/WciFExecjnOVS2cuMe22aaz+12q6PNWF0bNGU65KOadj+SwV1CIiIiVQi1otWDZmGYObDvZoO+FVwtXtw0OMMQz+z2DSLqSx4NkFTse54lTCKSZ3n8y+BfsYMmEIg14fhF+ASsbr0bsjIiIi15XT7WPXyV1ORyl1QlqG0O3pbmz6YBMHlh5wOg4Hlx3k3c7vcu7IOe759h5iHo1xOlKJoIJaRERErutKtw/dpfaIm357E1XrV2XOY3PITM90LMfGDzbyQd8PCK4ezMOrH6Zh34aOZSlpVFCLiIjIdYVXCad7RHemb5vudJRSKahiEIPeGMSJrSdY/cZqr7dvsyxxv45j5gMzadCzAQ+vfpiazWp6PUdJpoJaREREbmhkq5Hq9uFBzW9rTtMhTfn+999zNvGs19o9e/gsH938Ecv/upyOj3bk7nl3U756ea+1X1qooBYREZEbUrcPzzLGcMsbt5CVkcW3v/jWK23unLmTCe0mkLgqkVsn3cqQt4fgH+jvlbZLGxXUIiIickM53T5UUHtO9UbV6fl8T7bP2M7e+Xs91k56ajqzH5/Np8M/pWr9qjyy7hE6PNwBY7SMeFGpoBYREZECGdFqBJuPb1a3Dw/q/kx3ajStwZzH55BxKcPt5z+++TiTYiax9q21dHu6Gw+tfIhaLWq5vZ2yRgW1iIiIFMidre4E1O3DkwLKBTD4zcGkxKew/G/L3XZeay2r/72aSZ0ncTH5Ivd8ew8D/j6AgHIBbmujLFNBLSIiIgWibh/e0XhAY1qNaMWyV5Zxat+pYp/vQtIFpt06jXlPzKNR/0aM3zyexgPcv0x9WaaCWkRERApM3T68Y+DrA/EL8GPuz+ZirS3yefbO38uEthPYF7ePQW8MYvTXo6lYu6IbkwqooBYREZFCULcP76hSrwq9X+rNnjl72DWz8L+8ZFzOYP4v5/PxwI8pX6M8Y9eMpcvPumjgoYeooBYREZECU7cP7+n8s87UblObuU/MJe1CWoGPO7nrJO91e4+V/1hJzGMxjF07ltC2oR5MKiqoRUREpFByun3sTt7tdJRSzT/QnyFvD+HsobMseXnJDfe31rL+3fW80+Edzhw8w6iZoxjynyEElg/0QtqyTQW1iIiIFMqVbh/bdJfa0+rH1if6gWhW/n0lSduT8t0v9VQqn438jK/Hfk14t3DGbx5P89uaezFp2aaCWkRERAolvEo43cK7qduHl/T/a3+CKgUx5/E5eQ5QPLDkABPaTWDnVzvp/2p/7p1/L5XrVnYgadmlglpEREQKbWTrkWw6vkndPrygYu2K9PtzP/Z/t5+t07Ze2Z6ZnsmiFxbxQZ8PCCgXwEMrH6LHMz0wfhp46G0qqEVERKTQ1O3DuzqM7UDdTnX59hffcunMJU7tO8WUm6aw9OWltLu/HY9ueJS6MXWdjllmqaAWERGRQlO3D+/y8/djyFtDuHDiAtNvn86E6Akk7Ujijk/uYNjkYQRVCnI6YpmmglpERESKZESrEer24UV1Y+oSMz6GhEUJhLYNZdzGcbT5aRunYwkqqEVERKSI1O3D+wb8fQCjZo3ige8eoFpkNafjiIsKahERESmSiKoR6vbhZYHlA2l+a3P8AlTC+RJdDRERESmynG4fe5L3OB1FxDEqqEVERKTIrnT70F1qKcMcK6iNMf7GmA3GmG9cnzc0xqw2xsQbYz41xmi4qoiIiI/L6fYxfdt0p6OIOMbJO9RPAjtyff5X4HVrbRPgFPCQI6lERESkUNTtQ8o6RwpqY0w4MAR41/W5AfoCn7l2+QAY7kQ2ERERKRx1+5Cyzqk71P8EngWyXJ/XBE5bazNcnycC9RzIJSIiIoUUUTWCruFdVVBLmeX1gtoYMxQ4Ya1dV8TjHzHGrDXGrE1KSnJzOhERESmKka1GsvHYRnX7kDLJiTvUPYDbjDH7gU/I7urxL6CaMSbAtU84cDivg62171hrY6y1MSEhId7IKyIiIjegbh9Slnm9oLbW/sZaG26tjQRGAYustXcDi4E7XbvdD8z0djYREREpGnX7EE87deoUCxYswFrrdJQf8aV5qH8F/MIYE092n+r3HM4jIiIihTCi1Qh1+xC3y8zMZNmyZbz11lusXbsWX+zy62hBba39zlo71PXxPmttZ2ttE2vtCGvtZSeziYiISOGo24e426FDh3jnnXdYuHAhTZo04fHHH6d27dpOx/qRgBvvIiIiInJj9avWv9Lt47mezzkdR0qw1NRU4uLiWL9+PVWqVGHUqFE0b97c6Vj5UkEtIiIibjOi1Qienv808SnxNKnRxOk4UsJYa9m6dSvffvstFy9epGvXrvTp04egIN9eQNuX+lCLiIhICXel28c2dfuQwklJSeHjjz/miy++oFq1ajzyyCMMHDjQ54tp0B1qERERcaPc3T5+0/M3TseREiAzM5Ply5ezZMkSAgICuOWWW4iJicHPr+Tc91VBLSIiIm5VErt9HDlyhOXLl1O3bl3q1atH3bp1S8Sd0ZLuwIEDfPPNN5w8eZJWrVoxaNAgKleu7HSsQlNBLSIiIm51Z6s7eXr+08zYVnLuUp8/f56jR4+yfft2AIwxhISEXCmw69WrR+3atfH393c4aelw8eJFFixYwMaNG6lWrRp33XUXTZs2dT
pWkRlfnBy7oGJiYuzatWudjiEiIiLX6PpuV9Iy01j/6HqnoxTKxYsXOXz48FWP1NRUAAICAqhTp85VRXb16tUxxjicuuSw1rJ582bmz5/PpUuX6NatG7169SIwMNDpaDdkjFlnrY3J6zndoRYRERG3G9l6ZInr9gFQoUIFmjZteuVuqbWW06dPX1Vgr1u3jtWrVwNQvnz5K11EcorsihUrOvkSfNbJkyeZPXs2+/fvJzw8nKFDhxIaGup0LLfQHWoRERFxu4NnDtLgnw14pe8rJabbR0FlZmaSlJR0VZGdlJR0ZUnsatWqXSmuIyMjqVOnjsOJnZWRkcGyZctYtmwZgYGB9OvXj44dO5a4O/vXu0OtglpEREQ8oqR2+yiKtLQ0jh49elWRfebMGQDuueceGjdu7HBCZyQkJDB79mySk5OJiopiwIABVKpUyelYRaIuHyIiIuJ1I1qN4JcLfsnelL00rlG6C8qgoCAaNGhAgwYNrmw7f/4877//PnPmzGH8+PEEBJSdsis1NZVvv/2WTZs2Ub169VL/S0XJmeBPRERESpQri7xsL5uLvFSqVInBgweTkpLCihUrnI7jNcnJybz33nts2bKFnj17Mn78+FJdTIPuUIuIiIiHNKjWgC71ujB923R+2f2XZGZlkmkzycjKIDPL9a/NvOrjnOcKsl9s/VgqBfl294HGjRvTqlUrli5dSlRUFNWrV3c6kkclJCQwffp0/Pz8uP/++6lfv77TkbxCBbWIiIh4TE63j8A/un9atK3jt9K6dmu3n9fdBg4cSHx8PHPnzmX06NElbjBeQa1bt445c+ZQs2ZNRo8eXep/echNBbWIiIh4zNiOY0nLTCMjKwN/P38C/ALwN/5XfRzgF4C/n/9VHxdkv4bVGzr98gqkSpUq9O7dm/nz57Nr1y5atGjhdCS3ysrKYv78+axevZomTZpw5513Uq5cOadjeZUKahEREfGYKuWqlLpp84qic+fObNy4kXnz5tGoUaNSs6z5pUuX+Pzzz4mPj6dLly4MGDAAP7+yN0Sv7L1iERERES/z9/dnyJAhnDlzhiVLljgdxy1OnTrF5MmT2bdvH0OHDmXQoEFlspgGFdQiIiIiXlG/fn2io6NZuXIlSUlJTscplgMHDvDuu+9y7tw57rnnHjp27Oh0JEepoBYRERHxkv79+xMUFMScOXMoqYvrbdy4kQ8//JDy5cvz8MMP07BhyejL7kkqqEVERES8pGLFivTr14/9+/ezdetWp+MUirWWuLg4Zs6cSYMGDXjooYeoWbOm07F8ggpqERERES/q0KEDdevW5dtvv+XSpUtOxymQtLQ0Pv30U5YvX07Hjh25++67KV++vNOxfIYKahEREREv8vPzY8iQIVy4cIHFixc7HeeGzpw5w+TJk9m9ezeDBg1iyJAh+Pv7Ox3Lp6igFhEREfGyunXrEhMTww8//MDRo0edjpOvxMREJk2axOnTp7nrrrvo0qVLqV2YpjhUUIuIiIg4oG/fvlSoUIHZs2f75ADFrVu3MmXKFIKCgnjooYdo0qSJ05F8lgpqEREREQeUL1+em2++mcOHD7Nhwwan41xhrWXx4sV8/vnn1KtXj4cffpiQkBCnY/k0FdQiIiIiDmnbti3169cnLi6OixcvOh2H9PR0Pv/8c5YsWUJ0dDT33nsvFSpUcDqWz1NBLSIiIuIQYwxDhgzh8uXLxMXFOZrl3LlzTJkyhW3bttG/f39uu+02AgICHM1UUqigFhEREXFQ7dq16dKlCxs2bODQoUOOZDh69CiTJk0iKSmJUaNG0aNHDw0+LAQV1CIiIiIO6927N5UrV2b27NlkZWV5te3Nmzfz/vvvY4xhzJgxNG/e3KvtlwYqqEVEREQcFhQUxKBBgzh+/Dhr1qzxSpuXL1/miy++4Msvv6ROnTqMHTuWsLAwr7Rd2qhjjIiIiIgPaNmyJY0bN2bx4sW0bt2aypUre6ytxMREPv/8c86cOUPv3r3p2bMnfn66z1pUeudEREREfIAxhsGDB5OZmcn8+fM90kZWVhZLlixh8uTJADz44IP06tVLxXQx6Q61iIiIiI+oUaMGsbGxfP/997Rv355GjRq57dxnzpzhyy+/5MCBA7Rp04YhQ4YQHBzstvOXZfp1RERERMSH9OjRg+rVqzNnzhwyMjLccs7t27czYcIEjh49yvDhw7n99ttVTLuRCmoRERERHxIYGMgtt9xCcnIyK1euLNa50tLSmDVrFjNmzKBmzZo8+uijtGvXTlPiuZm6fIiIiIj4mKZNm9KyZUuWLFlCVFQU1apVK/Q5jh49yueff05ycjKxsbH07t0bf39/94cV3aEWERER8UUDBw7EGMO8efMKdZy1lpUrV/Luu++SlpbGfffdR79+/VRMe5DuUIuIiIj4oKpVq9KrVy/i4uLYtWtXgRZcOX/+PF999RV79+6lRYsW3HrrrVSoUMELacs2FdQiIiIiPqpr165s2rSJefPm0ahRIwIDA/Pdd/fu3cycOZO0tDSGDh1Khw4d1FfaS9TlQ0RERMRH+fv7M3jwYE6fPs3SpUvz3CcjI4O5c+cybdo0KleuzCOPPELHjh1VTHuR7lCLiIiI+LDIyEjatm3LihUraNu2LbVq1bry3IkTJ/j88885ceIEXbp0oX///gQEqLzzNt2hFhEREfFxN998MwEBAcydOxdrLdZafvjhByZNmsSFCxe46667GDRokIpph+hdFxEREfFxlSpVom/fvsydO5e1a9eyd+9edu3aRZMmTRg2bBiVKlVyOmKZpoJaREREpASIiYlhw4YNzJkzB39/fwYOHEiXLl3UV9oHqKAWERERKQH8/PwYNmwY33//Pb169SIsLMzpSOKiglpERESkhAgLC+OnP/2p0zHkGhqUKCIiIiJSDF4vqI0xEcaYxcaY7caYbcaYJ13baxhjFhhj9rj+re7tbCIiIiIiheXEHeoM4GlrbSugK/C4MaYV8GtgobW2KbDQ9bmIiIiIiE/zekFtrT1qrV3v+vgcsAOoBwwDPnDt9gEw3NvZREREREQKy9E+1MaYSKA9sBoItdYedT11DAh1KpeIiIiISEE5VlAbYyoBnwNPWWvP5n7OWmsBm89xjxhj1hpj1iYlJXkhqYiIiIhI/hwpqI0xgWQX0/+11n7h2nzcGFPH9Xwd4ERex1pr37HWxlhrY0JCQrwTWEREREQkH07M8mGA94Ad1trXcj01C7jf9fH9wExvZxMRERERKSwnFnbpAdwLbDHGbHRtew74CzDdGPMQcAAY6UA2EREREZFC8XpBba1dBuS36Hw/b2YRERERESkurZQoIiIiIlIMKqhFRERERIpBBbWIiIiISDGooBYRERERKQYV1CIiIiIixaCCWkRERESkGFRQi4iIiIgUgwpqEREREZFiMNZapzMUmTEmiexVFSV/tYCTToeQ69I18n26Rr5N18f36Rr5Pl2jG2tgrQ3J64kSXVDLjRlj1lprY5zOIfnTNfJ9uka+TdfH9+ka+T5do+JRlw8RERERkWJQQS0iIiIiUgwqq
Eu/d5wOIDeka+T7dI18m66P79M18n26RsWgPtQiIiIiIsWgO9QiIiIiIsWggroEM8ZEGGMWG2O2G2O2GWOezPXcz4wxO13bX821/TfGmHhjzC5jzEBnkpcdhb1GxphIY0yqMWaj6zHBufRlQ37XyBjzaa7rsN8YszHXMfo68qLCXiN9HXnfda5RtDFmles6rDXGdHZtN8aYN1xfR5uNMR2cfQWlXxGuUW9jzJlcX0cvOvsKfFuA0wGkWDKAp621640xlYF1xpgFQCgwDGhnrb1sjKkNYIxpBYwCWgN1gThjTDNrbaZD+cuCQl0jl73W2mgHspZVeV4ja+1Pc3YwxvwDOOP6WF9H3leoa+SiryPvyu973avAS9baucaYwa7PewO3AE1djy7A265/xXMKe40AllprhzoTt2TRHeoSzFp71Fq73vXxOWAHUA8YD/zFWnvZ9dwJ1yHDgE+stZettQlAPNDZ+8nLjiJcI/Gy61wjIPtOGjASmObapK8jLyvCNRIvu841skAV125VgSOuj4cBH9psq4Bqxpg6Xo5dphThGkkhqKAuJYwxkUB7YDXQDOhpjFltjPneGNPJtVs94FCuwxLJ9UNJPKuA1wigoTFmg2t7TyeyllXXXKMcPYHj1to9rs/1deSgAl4j0NeRY665Rk8BfzPGHAL+DvzGtZu+jhxUwGsE0M0Ys8kYM9cY09rrQUsQFdSlgDGmEvA58JS19izZXXlqAF2BZ4Dprjs44pBCXKOjQH1rbXvgF8BUY0yVfE4rbpTHNcoxGt359AmFuEb6OnJIHtdoPPBza20E8HPgPSfzSaGu0Xqyl9puB/wb+MqBuCWGCuoSzhgTSPYXxn+ttV+4NicCX7j+lLYGyAJqAYeBiFyHh7u2iQcV5hq5uhEkA1hr1wF7yb6bLR6UzzXCGBMA3A58mmt3fR05oDDXSF9HzsjnGt0P5Hw8g/91j9LXkQMKc42stWetteddH88BAo0xtbwcucRQQV2Cue5ovgfssNa+luupr4A+rn2aAUHASWAWMMoYU84Y05DswSBrvBq6jCnsNTLGhBhj/F3bG5F9jfZ5NXQZc51rBNAf2GmtTcy17f/bu78Qzac4juPvz5bshkxKiUjtFU3MRAhRCjXKn6xsade/Vmtxp8jF+FdcrELiwsXmwoWwW/6WyF6Nsii7syMb5cq60G7G1LKL/br4/YYxzcjMr3k8s/N+3Tzn9zz9zjk9p9/Tt/Occ74+Rz220DHyOeq9fxmj/cCVbfkqYHpZztvAxva0j0uAyar6oWcdXoEWOkZJTpv+d7s9+WMVcKB3PV5ePOVjebsM2ACM5+8jvR4BtgHbkuwFjgC3V5PBZyLJ68BXNLt97/NkgiW3oDFKcgXwRJLfaGatN1fVwf+h3yvJnGPUzsisZ9Zyj6ryOeq9BY0R4HPUe/P91m0Cnm//SfgVuKf97H1ghGZT7yHgzp72dmVa6BitA+5N8jvwC7C+zAY4LzMlSpIkSR245EOSJEnqwIBakiRJ6sCAWpIkSerAgFqSJEnqwIBakiRJ6sCAWpL6VJKBJFva8ulJ3lzCtoaSjCxV/ZJ0LDOglqT+NQBsAaiq/VW1bgnbGqI5F1iStECeQy1JfSrJa8ANwD6a7GXnVNVgkjuAG4ETaLIAPkOTbXMDcBgYqaqDSdYCLwKn0iTP2FRVXye5BXgU+AOYpMk2+C2whib989PAu8ALwCBwHPBYVb3Vtn0TcDJwBvBqVT2+tN+EJPU3MyVKUv96GBisqqEkZ9MEudMGgWFgNU0w/FBVDSd5FtgIPAe8TJMl8JskFwMv0aQWHgWurarvkwxU1ZEko8CFVXU/QJKngI+r6q4kA8CuJB+1bV/Utn8I+CzJe1X1+RJ+D5LU1wyoJWl52llVU8BUkkngnfb9ceC8JCcClwJvJJm+5/j2dQx4pU2hvmOe+q8Brk/yYHu9GjirLX9YVQcAkuwALgcMqCWtWAbUkrQ8HZ5RPjrj+ijNb/sq4KeqGpp9Y1VtbmesrwO+SHLBHPUHuLmq9v3jzea+2WsFXTsoaUVzU6Ik9a8p4KTF3FhVPwPfteulSeP8try2qj6tqlHgR+DMOdr6AHgg7fR2kuEZn12d5JQka2jWco8tpo+SdKwwoJakPtUuqxhLshfYuogqbgPuTrIbmKDZ4AiwNcl4W+8nwG5gJ3Buki+T3Ao8SbMZcU+SifZ62i5gO7AH2O76aUkrnad8SJL+s/aUj782L0qSnKGWJEmSOnGGWpIkSerAGWpJkiSpAwNqSZIkqQMDakmSJKkDA2pJkiSpAwNqSZIkqQMDakmSJKmDPwHr7CI0R/dKzQAAAABJRU5ErkJggg==\n", - "text/plain": [ - "
" - ] - }, - "metadata": { - "needs_background": "light" - }, - "output_type": "display_data" - } - ], - "source": [ - "plt.figure(figsize=(12, 8))\n", - "plt.plot(forecasts['truth'].iloc[-24:], color='green', label='observed series')\n", - "plt.plot([None for _ in range(forecasts.shape[0])] + forecasts.iloc[-1]['prediction'], color='purple', label='point prediction')\n", - "plt.plot([None for _ in range(forecasts.shape[0])] + forecasts.iloc[-1]['lower'], color='grey')\n", - "plt.plot([None for _ in range(forecasts.shape[0])] + forecasts.iloc[-1]['upper'], color='grey')\n", - "plt.xlabel('timestep')\n", - "plt.ylabel('# sunspots')\n", - "plt.title(\"Forecasted amount of sunspots for the next semester\")\n", - "plt.legend()\n", - "plt.show()" - ] - }, - { - "cell_type": "markdown", - "metadata": {}, - "source": [ - "## Conclusion\n", - "\n", - "In this tutorial, we have gone through how you can train a machine learning model with Lightwood to produce forecasts for a univariate time series task.\n", - "\n", - "There are additional parameters to further customize your timeseries settings and/or prediction insights, so be sure to check the rest of the documentation." - ] - } - ], - "metadata": { - "kernelspec": { - "display_name": "mdb", - "language": "python", - "name": "mdb" - }, - "language_info": { - "codemirror_mode": { - "name": "ipython", - "version": 3 - }, - "file_extension": ".py", - "mimetype": "text/x-python", - "name": "python", - "nbconvert_exporter": "python", - "pygments_lexer": "ipython3", - "version": "3.8.6" - } - }, - "nbformat": 4, - "nbformat_minor": 4 -} diff --git a/docs/tutorials/tutorial_update_models/Tutorial -- Update a predictor.html b/docs/tutorials/tutorial_update_models/Tutorial -- Update a predictor.html deleted file mode 100644 index 374c63ec5..000000000 --- a/docs/tutorials/tutorial_update_models/Tutorial -- Update a predictor.html +++ /dev/null @@ -1,1087 +0,0 @@ - - - - - - - - - - Introduction — lightwood 1.6.1 documentation - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - -
Introduction

-

In this tutorial, we will go through an example of how to update a preexisting model. This might be useful when you come across additional data that you would like your model to take into account, without having to train a new model from scratch.

-

The main abstraction that Lightwood offers for this is the BaseMixer.partial_fit() method. To call it, you need to pass new training data and a held-out dev subset for internal mixer usage (e.g. early stopping). If you are using an aggregate ensemble, it’s likely you will want to do this for every single mixer. The convenient PredictorInterface.adjust() does this automatically for you.
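To make the two entry points concrete, here is a minimal sketch of what they look like in code. It is only a preview of what the rest of this tutorial builds step by step, and it assumes that the trained predictor exposes its mixers through a mixers attribute and that partial_fit() takes the new training data followed by the held-out dev split; check both assumptions against your Lightwood version. The dataframe names are placeholders.

# Minimal sketch, assuming `predictor` is an already-trained Lightwood predictor,
# that `predictor.mixers` exists, and that `partial_fit(train_data, dev_data)`
# uses this argument order; `original_train_df` / `newly_acquired_df` are
# placeholder names for your own dataframes.
from lightwood.data import EncodedDs

old_ds = EncodedDs(predictor.encoders, original_train_df, target)   # data the model was trained on
new_ds = EncodedDs(predictor.encoders, newly_acquired_df, target)   # freshly acquired data

# Option 1: update every mixer at once through the predictor interface
predictor.adjust({'old': old_ds, 'new': new_ds})

# Option 2: update a single mixer directly, passing a dev split for early stopping
# (here the old data doubles as the dev split, which is a simplification)
for mixer in predictor.mixers:
    mixer.partial_fit(new_ds, old_ds)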

-
-
-

Initial model training

-

First, let’s train a Lightwood predictor for the concrete strength dataset:

-
-
[1]:
-
-
-
-from lightwood.api.high_level import ProblemDefinition, json_ai_from_problem, predictor_from_json_ai
-import pandas as pd
-
-
-
-
-
[2]:
-
-
-
-# Load data
-df = pd.read_csv('https://raw.githubusercontent.com/mindsdb/lightwood/staging/tests/data/concrete_strength.csv')
-
-df = df.sample(frac=1, random_state=1)
-train_df = df[:int(0.2*len(df))]
-update_df = df[int(0.2*len(df)):int(0.8*len(df))]
-test_df = df[int(0.8*len(df)):]
-
-print(f'Train dataframe shape: {train_df.shape}')
-print(f'Update dataframe shape: {update_df.shape}')
-print(f'Test dataframe shape: {test_df.shape}')
-
-
-
-
-
-
-
-
-Train dataframe shape: (206, 10)
-Update dataframe shape: (618, 10)
-Test dataframe shape: (206, 10)
-
-
-

Note that we have three different data splits.

-

We will use the training split for the initial model training. As you can see, it’s only 20% of the total data we have. The update split will be used as training data to adjust/update our model. Finally, the held-out test set will give us a rough idea of the impact our updating procedure has on the model’s predictive capabilities.

-
-
[4]:
-
-
-
-# Define predictive task and predictor
-target = 'concrete_strength'
-pdef = ProblemDefinition.from_dict({'target': target, 'time_aim': 200})
-jai = json_ai_from_problem(df, pdef)
-
-# We will keep the architecture simple: a single neural mixer, and a `BestOf` ensemble:
-jai.outputs[target].mixers = [{
-    "module": "Neural",
-    "args": {
-        "fit_on_dev": False,
-        "stop_after": "$problem_definition.seconds_per_mixer",
-        "search_hyperparameters": False,
-    }
-}]
-
-jai.outputs[target].ensemble = {
-    "module": "BestOf",
-    "args": {
-        "args": "$pred_args",
-        "accuracy_functions": "$accuracy_functions",
-    }
-}
-
-# Build and train the predictor
-predictor = predictor_from_json_ai(jai)
-predictor.learn(train_df)
-
-
-
-
-
-
-
-
-INFO:lightwood-91181:Dropping features: []
-INFO:lightwood-91181:Analyzing a sample of 979
-INFO:lightwood-91181:from a total population of 1030, this is equivalent to 95.0% of your data.
-INFO:lightwood-91181:Using 15 processes to deduct types.
-INFO:lightwood-91181:Starting statistical analysis
-INFO:lightwood-91181:Finished statistical analysis
-INFO:lightwood-91181:Unable to import black formatter, predictor code might be a bit ugly.
-INFO:lightwood-91181:Dropping features: []
-INFO:lightwood-91181:Performing statistical analysis on data
-INFO:lightwood-91181:Starting statistical analysis
-INFO:lightwood-91181:Finished statistical analysis
-INFO:lightwood-91181:Cleaning the data
-INFO:lightwood-91181:Splitting the data into train/test
-INFO:lightwood-91181:Preparing the encoders
-INFO:lightwood-91181:Encoder prepping dict length of: 1
-INFO:lightwood-91181:Encoder prepping dict length of: 2
-INFO:lightwood-91181:Encoder prepping dict length of: 3
-INFO:lightwood-91181:Encoder prepping dict length of: 4
-INFO:lightwood-91181:Encoder prepping dict length of: 5
-INFO:lightwood-91181:Encoder prepping dict length of: 6
-INFO:lightwood-91181:Encoder prepping dict length of: 7
-INFO:lightwood-91181:Encoder prepping dict length of: 8
-INFO:lightwood-91181:Encoder prepping dict length of: 9
-INFO:lightwood-91181:Encoder prepping dict length of: 10
-INFO:lightwood-91181:Done running for: concrete_strength
-INFO:lightwood-91181:Done running for: id
-INFO:lightwood-91181:Done running for: cement
-INFO:lightwood-91181:Done running for: slag
-INFO:lightwood-91181:Done running for: flyAsh
-INFO:lightwood-91181:Done running for: water
-INFO:lightwood-91181:Done running for: superPlasticizer
-INFO:lightwood-91181:Done running for: coarseAggregate
-INFO:lightwood-91181:Done running for: fineAggregate
-INFO:lightwood-91181:Done running for: age
-INFO:lightwood-91181:Featurizing the data
-INFO:lightwood-91181:Training the mixers
-torch.cuda.amp.GradScaler is enabled, but CUDA is not available.  Disabling.
-This overload of addcmul_ is deprecated:
-        addcmul_(Number value, Tensor tensor1, Tensor tensor2)
-Consider using one of the following signatures instead:
-        addcmul_(Tensor tensor1, Tensor tensor2, *, Number value) (Triggered internally at  ../torch/csrc/utils/python_arg_parser.cpp:1005.)
-INFO:lightwood-91181:Loss of 7.69654655456543 with learning rate 0.0001
-INFO:lightwood-91181:Loss of 6.121406078338623 with learning rate 0.00014
-INFO:lightwood-91181:Loss of 5.7169036865234375 with learning rate 0.00019599999999999997
-INFO:lightwood-91181:Loss of 4.907417297363281 with learning rate 0.00027439999999999995
-INFO:lightwood-91181:Loss of 3.7602126598358154 with learning rate 0.0003841599999999999
-INFO:lightwood-91181:Loss of 1.8155415058135986 with learning rate 0.0005378239999999999
-INFO:lightwood-91181:Loss of 3.7833187580108643 with learning rate 0.0007529535999999998
-INFO:lightwood-91181:Loss of 8.216030836105347 with learning rate 0.0010541350399999995
-INFO:lightwood-91181:Found learning rate of: 0.0005378239999999999
-DEBUG:lightwood-91181:Loss @ epoch 1: 0.7302289009094238
-DEBUG:lightwood-91181:Loss @ epoch 2: 0.9203720092773438
-DEBUG:lightwood-91181:Loss @ epoch 3: 0.8405624628067017
-DEBUG:lightwood-91181:Loss @ epoch 4: 0.7608699202537537
-DEBUG:lightwood-91181:Loss @ epoch 5: 0.6823285222053528
-DEBUG:lightwood-91181:Loss @ epoch 6: 0.606808602809906
-DEBUG:lightwood-91181:Loss @ epoch 7: 0.4470987617969513
-DEBUG:lightwood-91181:Loss @ epoch 8: 0.3933545649051666
-DEBUG:lightwood-91181:Loss @ epoch 9: 0.3497759997844696
-DEBUG:lightwood-91181:Loss @ epoch 10: 0.3151411712169647
-DEBUG:lightwood-91181:Loss @ epoch 11: 0.2879962623119354
-DEBUG:lightwood-91181:Loss @ epoch 12: 0.2667108178138733
-DEBUG:lightwood-91181:Loss @ epoch 13: 0.23354031145572662
-DEBUG:lightwood-91181:Loss @ epoch 14: 0.21926474571228027
-DEBUG:lightwood-91181:Loss @ epoch 15: 0.20496906340122223
-DEBUG:lightwood-91181:Loss @ epoch 16: 0.19059491157531738
-DEBUG:lightwood-91181:Loss @ epoch 17: 0.17612512409687042
-DEBUG:lightwood-91181:Loss @ epoch 18: 0.161383256316185
-DEBUG:lightwood-91181:Loss @ epoch 19: 0.12839828431606293
-DEBUG:lightwood-91181:Loss @ epoch 20: 0.1162123903632164
-DEBUG:lightwood-91181:Loss @ epoch 21: 0.10669219493865967
-DEBUG:lightwood-91181:Loss @ epoch 22: 0.09954904764890671
-DEBUG:lightwood-91181:Loss @ epoch 23: 0.09420691430568695
-DEBUG:lightwood-91181:Loss @ epoch 24: 0.0900391936302185
-DEBUG:lightwood-91181:Loss @ epoch 25: 0.08349908888339996
-DEBUG:lightwood-91181:Loss @ epoch 26: 0.0822099968791008
-DEBUG:lightwood-91181:Loss @ epoch 27: 0.08120812475681305
-DEBUG:lightwood-91181:Loss @ epoch 28: 0.0804857686161995
-DEBUG:lightwood-91181:Loss @ epoch 29: 0.07996372133493423
-DEBUG:lightwood-91181:Loss @ epoch 30: 0.07936403155326843
-DEBUG:lightwood-91181:Loss @ epoch 31: 0.07869081199169159
-DEBUG:lightwood-91181:Loss @ epoch 32: 0.07849359512329102
-DEBUG:lightwood-91181:Loss @ epoch 33: 0.07820077985525131
-DEBUG:lightwood-91181:Loss @ epoch 34: 0.07790301740169525
-DEBUG:lightwood-91181:Loss @ epoch 35: 0.07746117562055588
-DEBUG:lightwood-91181:Loss @ epoch 36: 0.0766073539853096
-DEBUG:lightwood-91181:Loss @ epoch 37: 0.07440945506095886
-DEBUG:lightwood-91181:Loss @ epoch 38: 0.07304742932319641
-DEBUG:lightwood-91181:Loss @ epoch 39: 0.07175709307193756
-DEBUG:lightwood-91181:Loss @ epoch 40: 0.0706694945693016
-DEBUG:lightwood-91181:Loss @ epoch 41: 0.06960804760456085
-DEBUG:lightwood-91181:Loss @ epoch 42: 0.0683063194155693
-DEBUG:lightwood-91181:Loss @ epoch 43: 0.06553898006677628
-DEBUG:lightwood-91181:Loss @ epoch 44: 0.06447519361972809
-DEBUG:lightwood-91181:Loss @ epoch 45: 0.06355087459087372
-DEBUG:lightwood-91181:Loss @ epoch 46: 0.06285689026117325
-DEBUG:lightwood-91181:Loss @ epoch 47: 0.0621829479932785
-DEBUG:lightwood-91181:Loss @ epoch 48: 0.06127836927771568
-DEBUG:lightwood-91181:Loss @ epoch 49: 0.05949181318283081
-DEBUG:lightwood-91181:Loss @ epoch 50: 0.058798886835575104
-DEBUG:lightwood-91181:Loss @ epoch 51: 0.058218929916620255
-DEBUG:lightwood-91181:Loss @ epoch 52: 0.057854749262332916
-DEBUG:lightwood-91181:Loss @ epoch 53: 0.05746406316757202
-DEBUG:lightwood-91181:Loss @ epoch 54: 0.056835610419511795
-DEBUG:lightwood-91181:Loss @ epoch 55: 0.05569766089320183
-DEBUG:lightwood-91181:Loss @ epoch 56: 0.05525219812989235
-DEBUG:lightwood-91181:Loss @ epoch 57: 0.05490746721625328
-DEBUG:lightwood-91181:Loss @ epoch 58: 0.054767243564128876
-DEBUG:lightwood-91181:Loss @ epoch 59: 0.05455196276307106
-DEBUG:lightwood-91181:Loss @ epoch 60: 0.0540977418422699
-DEBUG:lightwood-91181:Loss @ epoch 61: 0.05336076393723488
-DEBUG:lightwood-91181:Loss @ epoch 62: 0.053060129284858704
-DEBUG:lightwood-91181:Loss @ epoch 63: 0.05285469442605972
-DEBUG:lightwood-91181:Loss @ epoch 64: 0.0528554692864418
-DEBUG:lightwood-91181:Loss @ epoch 65: 0.05273965373635292
-DEBUG:lightwood-91181:Loss @ epoch 66: 0.05239948257803917
-DEBUG:lightwood-91181:Loss @ epoch 67: 0.05194811150431633
-DEBUG:lightwood-91181:Loss @ epoch 68: 0.05178629234433174
-DEBUG:lightwood-91181:Loss @ epoch 69: 0.05171119421720505
-DEBUG:lightwood-91181:Loss @ epoch 70: 0.05184203386306763
-DEBUG:lightwood-91181:Loss @ epoch 71: 0.05181184783577919
-DEBUG:lightwood-91181:Loss @ epoch 72: 0.05157444253563881
-DEBUG:lightwood-91181:Loss @ epoch 73: 0.05137106031179428
-DEBUG:lightwood-91181:Loss @ epoch 74: 0.05131785199046135
-DEBUG:lightwood-91181:Loss @ epoch 75: 0.05133713781833649
-DEBUG:lightwood-91181:Loss @ epoch 76: 0.05156172439455986
-INFO:lightwood-91181:Ensembling the mixer
-INFO:lightwood-91181:Mixer: Neural got accuracy: 0.5960601553597429
-INFO:lightwood-91181:Picked best mixer: Neural
-INFO:lightwood-91181:Analyzing the ensemble of mixers
-INFO:lightwood-91181:The block ICP is now running its analyze() method
-INFO:lightwood-91181:The block AccStats is now running its analyze() method
-INFO:lightwood-91181:The block GlobalFeatureImportance is now running its analyze() method
-INFO:lightwood-91181:Adjustment on validation requested.
-INFO:lightwood-91181:Updating the mixers
-torch.cuda.amp.GradScaler is enabled, but CUDA is not available.  Disabling.
-DEBUG:lightwood-91181:Loss @ epoch 1: 0.06892643496394157
-DEBUG:lightwood-91181:Loss @ epoch 2: 0.06978078782558442
-DEBUG:lightwood-91181:Loss @ epoch 3: 0.06783530339598656
-DEBUG:lightwood-91181:Loss @ epoch 4: 0.07201590612530709
-DEBUG:lightwood-91181:Loss @ epoch 5: 0.0718848429620266
-
-
-
-
[6]:
-
-
-
-# Train and get predictions for the held out test set
-predictions = predictor.predict(test_df)
-predictions
-
-
-
-
-
-
-
-
-INFO:lightwood-91181:Dropping features: []
-INFO:lightwood-91181:Cleaning the data
-INFO:lightwood-91181:Featurizing the data
-INFO:lightwood-91181:The block ICP is now running its explain() method
-INFO:lightwood-91181:The block AccStats is now running its explain() method
-INFO:lightwood-91181:AccStats.explain() has not been implemented, no modifications will be done to the data insights.
-INFO:lightwood-91181:The block GlobalFeatureImportance is now running its explain() method
-INFO:lightwood-91181:GlobalFeatureImportance.explain() has not been implemented, no modifications will be done to the data insights.
-
-
-
-
[6]:
-
-
-
-
     prediction  truth  confidence      lower      upper
0     51.193603  71.30      0.9991  30.540443  71.846764
1     28.503390  39.60      0.9991   7.850229  49.156551
2     18.356139  10.79      0.9991   0.000000  39.009300
3     16.062094   4.83      0.9991   0.000000  36.715254
4     32.623629  47.71      0.9991  11.970469  53.276790
..          ...    ...         ...        ...        ...
201   45.633811  40.93      0.9991  24.980650  66.286972
202   41.613209  52.82      0.9991  20.960048  62.266369
203   31.297044  39.66      0.9991  10.643883  51.950204
204   29.409258  13.29      0.9991   8.756097  50.062418
205   37.712138  17.84      0.9991  17.058977  58.365298
-

206 rows × 5 columns

-
-
-
-

Updating the predictor

-

As previously mentioned, you can update any given mixer with a BaseMixer.partial_fit() call. If you have multiple mixers and want to update them all at once, you should use PredictorInterface.adjust().

-

For both of these methods, two encoded datasources are needed as input (for adjust you need to wrap them in a dictionary with ‘old’ and ‘new’ keys).

-

Let’s adjust our predictor:

-
-
[8]:
-
-
-
-from lightwood.data import EncodedDs
-
-train_ds = EncodedDs(predictor.encoders, train_df, target)
-update_ds = EncodedDs(predictor.encoders, update_df, target)
-
-predictor.adjust({'old': train_ds, 'new': update_ds})
-
-
-
-
-
-
-
-
-INFO:lightwood-91181:Updating the mixers
-torch.cuda.amp.GradScaler is enabled, but CUDA is not available.  Disabling.
-DEBUG:lightwood-91181:Loss @ epoch 1: 0.06545061928530534
-DEBUG:lightwood-91181:Loss @ epoch 2: 0.0679960281898578
-DEBUG:lightwood-91181:Loss @ epoch 3: 0.07171888339022796
-DEBUG:lightwood-91181:Loss @ epoch 4: 0.07307156516859929
-DEBUG:lightwood-91181:Loss @ epoch 5: 0.06360626469055812
-DEBUG:lightwood-91181:Loss @ epoch 6: 0.06457449619968732
-DEBUG:lightwood-91181:Loss @ epoch 7: 0.057915804286797844
-DEBUG:lightwood-91181:Loss @ epoch 8: 0.06492673171063264
-
-
-
-
[9]:
-
-
-
-new_predictions = predictor.predict(test_df)
-new_predictions
-
-
-
-
-
-
-
-
-INFO:lightwood-91181:Dropping features: []
-INFO:lightwood-91181:Cleaning the data
-INFO:lightwood-91181:Featurizing the data
-INFO:lightwood-91181:The block ICP is now running its explain() method
-INFO:lightwood-91181:The block AccStats is now running its explain() method
-INFO:lightwood-91181:AccStats.explain() has not been implemented, no modifications will be done to the data insights.
-INFO:lightwood-91181:The block GlobalFeatureImportance is now running its explain() method
-INFO:lightwood-91181:GlobalFeatureImportance.explain() has not been implemented, no modifications will be done to the data insights.
-
-
-
-
[9]:
-
-
-
-
     prediction  truth  confidence      lower      upper
0     53.392253  71.30      0.9991  32.739093  74.045414
1     27.886292  39.60      0.9991   7.233132  48.539453
2     16.301788  10.79      0.9991   0.000000  36.954948
3     13.862827   4.83      0.9991   0.000000  34.515988
4     31.421035  47.71      0.9991  10.767875  52.074196
..          ...    ...         ...        ...        ...
201   42.631037  40.93      0.9991  21.977876  63.284197
202   37.502444  52.82      0.9991  16.849283  58.155604
203   29.491487  39.66      0.9991   8.838326  50.144647
204   28.013570  13.29      0.9991   7.360410  48.666731
205   35.336043  17.84      0.9991  14.682883  55.989204
-

206 rows × 5 columns

-
-
-

Nice! Our predictor was updated, and new predictions are looking good. Let’s compare the old and new accuracies:

-
-
[10]:
-
-
-
-from sklearn.metrics import r2_score
-
-old_acc = r2_score(predictions['truth'], predictions['prediction'])
-new_acc = r2_score(new_predictions['truth'], new_predictions['prediction'])
-
-print(f'Old Accuracy: {round(old_acc, 3)}\nNew Accuracy: {round(new_acc, 3)}')
-
-
-
-
-
-
-
-
-Old Accuracy: 0.583
-New Accuracy: 0.624
-
-
-

After updating, we see an increase in the R2 score of predictions for the held out test set.

-
-
-

Conclusion

-

We have gone through a simple example of how Lightwood predictors can leverage newly acquired data to improve their predictions. The interface for doing so is fairly simple: all it requires is some new data and a single call to adjust().

-

You can further customize the update logic by modifying the partial_fit() method of each mixer.
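As a rough illustration, the sketch below subclasses the Neural mixer used earlier and overrides partial_fit() to shrink the time budget during updates, so that new data nudges the existing weights rather than dominating them. The import path, the partial_fit(train_data, dev_data) signature and the stop_after attribute are assumptions to verify against your Lightwood version; treat this as a starting point rather than a drop-in recipe.

# Sketch of a custom update policy. Assumptions to verify: the Neural mixer is
# importable as lightwood.mixer.Neural, partial_fit() takes (train_data, dev_data),
# and the mixer keeps its time budget in a `stop_after` attribute.
from lightwood.mixer import Neural


class GentleNeural(Neural):
    """Neural mixer that updates with a capped time budget."""

    def partial_fit(self, train_data, dev_data):
        original_budget = self.stop_after
        self.stop_after = min(original_budget, 10)  # cap updates at 10 seconds (arbitrary choice)
        try:
            super().partial_fit(train_data, dev_data)
        finally:
            self.stop_after = original_budget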

-
-
- - - - - - - - - - - \ No newline at end of file diff --git a/docs/tutorials/tutorial_update_models/Tutorial -- Update a predictor.ipynb b/docs/tutorials/tutorial_update_models/Tutorial -- Update a predictor.ipynb deleted file mode 100644 index fcb2a4397..000000000 --- a/docs/tutorials/tutorial_update_models/Tutorial -- Update a predictor.ipynb +++ /dev/null @@ -1,703 +0,0 @@ -{ - "cells": [ - { - "cell_type": "markdown", - "metadata": {}, - "source": [ - "# Introduction\n", - "\n", - "In this tutorial, we will go through an example to update a preexisting model. This might be useful when you come across additional data that you would want to consider, without having to train a model from scratch.\n", - "\n", - "The main abstraction that Lightwood offers for this is the `BaseMixer.partial_fit()` method. To call it, you need to pass new training data and a held-out dev subset for internal mixer usage (e.g. early stopping). If you are using an aggregate ensemble, it's likely you will want to do this for every single mixer. The convienient `PredictorInterface.adjust()` does this automatically for you.\n", - "\n", - "\n", - "# Initial model training\n", - "\n", - "First, let's train a Lightwood predictor for the `concrete strength` dataset:" - ] - }, - { - "cell_type": "code", - "execution_count": 1, - "metadata": {}, - "outputs": [], - "source": [ - "from lightwood.api.high_level import ProblemDefinition, json_ai_from_problem, predictor_from_json_ai\n", - "import pandas as pd" - ] - }, - { - "cell_type": "code", - "execution_count": 2, - "metadata": {}, - "outputs": [ - { - "name": "stdout", - "output_type": "stream", - "text": [ - "Train dataframe shape: (206, 10)\n", - "Update dataframe shape: (618, 10)\n", - "Test dataframe shape: (206, 10)\n" - ] - } - ], - "source": [ - "# Load data\n", - "df = pd.read_csv('https://raw.githubusercontent.com/mindsdb/lightwood/staging/tests/data/concrete_strength.csv')\n", - "\n", - "df = df.sample(frac=1, random_state=1)\n", - "train_df = df[:int(0.2*len(df))]\n", - "update_df = df[int(0.2*len(df)):int(0.8*len(df))]\n", - "test_df = df[int(0.8*len(df)):]\n", - "\n", - "print(f'Train dataframe shape: {train_df.shape}')\n", - "print(f'Update dataframe shape: {update_df.shape}')\n", - "print(f'Test dataframe shape: {test_df.shape}')" - ] - }, - { - "cell_type": "markdown", - "metadata": {}, - "source": [ - "Note that we have three different data splits.\n", - "\n", - "We will use the `training` split for the initial model training. As you can see, it's only a 20% of the total data we have. The `update` split will be used as training data to adjust/update our model. Finally, the held out `test` set will give us a rough idea of the impact our updating procedure has on the model's predictive capabilities." 
- ] - }, - { - "cell_type": "code", - "execution_count": 4, - "metadata": { - "scrolled": true - }, - "outputs": [ - { - "name": "stderr", - "output_type": "stream", - "text": [ - "\u001b[32mINFO:lightwood-91181:Dropping features: []\u001b[0m\n", - "\u001b[32mINFO:lightwood-91181:Analyzing a sample of 979\u001b[0m\n", - "\u001b[32mINFO:lightwood-91181:from a total population of 1030, this is equivalent to 95.0% of your data.\u001b[0m\n", - "\u001b[32mINFO:lightwood-91181:Using 15 processes to deduct types.\u001b[0m\n", - "\u001b[32mINFO:lightwood-91181:Starting statistical analysis\u001b[0m\n", - "\u001b[32mINFO:lightwood-91181:Finished statistical analysis\u001b[0m\n", - "\u001b[32mINFO:lightwood-91181:Unable to import black formatter, predictor code might be a bit ugly.\u001b[0m\n", - "\u001b[32mINFO:lightwood-91181:Dropping features: []\u001b[0m\n", - "\u001b[32mINFO:lightwood-91181:Performing statistical analysis on data\u001b[0m\n", - "\u001b[32mINFO:lightwood-91181:Starting statistical analysis\u001b[0m\n", - "\u001b[32mINFO:lightwood-91181:Finished statistical analysis\u001b[0m\n", - "\u001b[32mINFO:lightwood-91181:Cleaning the data\u001b[0m\n", - "\u001b[32mINFO:lightwood-91181:Splitting the data into train/test\u001b[0m\n", - "\u001b[32mINFO:lightwood-91181:Preparing the encoders\u001b[0m\n", - "\u001b[32mINFO:lightwood-91181:Encoder prepping dict length of: 1\u001b[0m\n", - "\u001b[32mINFO:lightwood-91181:Encoder prepping dict length of: 2\u001b[0m\n", - "\u001b[32mINFO:lightwood-91181:Encoder prepping dict length of: 3\u001b[0m\n", - "\u001b[32mINFO:lightwood-91181:Encoder prepping dict length of: 4\u001b[0m\n", - "\u001b[32mINFO:lightwood-91181:Encoder prepping dict length of: 5\u001b[0m\n", - "\u001b[32mINFO:lightwood-91181:Encoder prepping dict length of: 6\u001b[0m\n", - "\u001b[32mINFO:lightwood-91181:Encoder prepping dict length of: 7\u001b[0m\n", - "\u001b[32mINFO:lightwood-91181:Encoder prepping dict length of: 8\u001b[0m\n", - "\u001b[32mINFO:lightwood-91181:Encoder prepping dict length of: 9\u001b[0m\n", - "\u001b[32mINFO:lightwood-91181:Encoder prepping dict length of: 10\u001b[0m\n", - "\u001b[32mINFO:lightwood-91181:Done running for: concrete_strength\u001b[0m\n", - "\u001b[32mINFO:lightwood-91181:Done running for: id\u001b[0m\n", - "\u001b[32mINFO:lightwood-91181:Done running for: cement\u001b[0m\n", - "\u001b[32mINFO:lightwood-91181:Done running for: slag\u001b[0m\n", - "\u001b[32mINFO:lightwood-91181:Done running for: flyAsh\u001b[0m\n", - "\u001b[32mINFO:lightwood-91181:Done running for: water\u001b[0m\n", - "\u001b[32mINFO:lightwood-91181:Done running for: superPlasticizer\u001b[0m\n", - "\u001b[32mINFO:lightwood-91181:Done running for: coarseAggregate\u001b[0m\n", - "\u001b[32mINFO:lightwood-91181:Done running for: fineAggregate\u001b[0m\n", - "\u001b[32mINFO:lightwood-91181:Done running for: age\u001b[0m\n", - "\u001b[32mINFO:lightwood-91181:Featurizing the data\u001b[0m\n", - "\u001b[32mINFO:lightwood-91181:Training the mixers\u001b[0m\n", - "torch.cuda.amp.GradScaler is enabled, but CUDA is not available. 
Disabling.\n", - "This overload of addcmul_ is deprecated:\n", - "\taddcmul_(Number value, Tensor tensor1, Tensor tensor2)\n", - "Consider using one of the following signatures instead:\n", - "\taddcmul_(Tensor tensor1, Tensor tensor2, *, Number value) (Triggered internally at ../torch/csrc/utils/python_arg_parser.cpp:1005.)\n", - "\u001b[32mINFO:lightwood-91181:Loss of 7.69654655456543 with learning rate 0.0001\u001b[0m\n", - "\u001b[32mINFO:lightwood-91181:Loss of 6.121406078338623 with learning rate 0.00014\u001b[0m\n", - "\u001b[32mINFO:lightwood-91181:Loss of 5.7169036865234375 with learning rate 0.00019599999999999997\u001b[0m\n", - "\u001b[32mINFO:lightwood-91181:Loss of 4.907417297363281 with learning rate 0.00027439999999999995\u001b[0m\n", - "\u001b[32mINFO:lightwood-91181:Loss of 3.7602126598358154 with learning rate 0.0003841599999999999\u001b[0m\n", - "\u001b[32mINFO:lightwood-91181:Loss of 1.8155415058135986 with learning rate 0.0005378239999999999\u001b[0m\n", - "\u001b[32mINFO:lightwood-91181:Loss of 3.7833187580108643 with learning rate 0.0007529535999999998\u001b[0m\n", - "\u001b[32mINFO:lightwood-91181:Loss of 8.216030836105347 with learning rate 0.0010541350399999995\u001b[0m\n", - "\u001b[32mINFO:lightwood-91181:Found learning rate of: 0.0005378239999999999\u001b[0m\n", - "\u001b[37mDEBUG:lightwood-91181:Loss @ epoch 1: 0.7302289009094238\u001b[0m\n", - "\u001b[37mDEBUG:lightwood-91181:Loss @ epoch 2: 0.9203720092773438\u001b[0m\n", - "\u001b[37mDEBUG:lightwood-91181:Loss @ epoch 3: 0.8405624628067017\u001b[0m\n", - "\u001b[37mDEBUG:lightwood-91181:Loss @ epoch 4: 0.7608699202537537\u001b[0m\n", - "\u001b[37mDEBUG:lightwood-91181:Loss @ epoch 5: 0.6823285222053528\u001b[0m\n", - "\u001b[37mDEBUG:lightwood-91181:Loss @ epoch 6: 0.606808602809906\u001b[0m\n", - "\u001b[37mDEBUG:lightwood-91181:Loss @ epoch 7: 0.4470987617969513\u001b[0m\n", - "\u001b[37mDEBUG:lightwood-91181:Loss @ epoch 8: 0.3933545649051666\u001b[0m\n", - "\u001b[37mDEBUG:lightwood-91181:Loss @ epoch 9: 0.3497759997844696\u001b[0m\n", - "\u001b[37mDEBUG:lightwood-91181:Loss @ epoch 10: 0.3151411712169647\u001b[0m\n", - "\u001b[37mDEBUG:lightwood-91181:Loss @ epoch 11: 0.2879962623119354\u001b[0m\n", - "\u001b[37mDEBUG:lightwood-91181:Loss @ epoch 12: 0.2667108178138733\u001b[0m\n", - "\u001b[37mDEBUG:lightwood-91181:Loss @ epoch 13: 0.23354031145572662\u001b[0m\n", - "\u001b[37mDEBUG:lightwood-91181:Loss @ epoch 14: 0.21926474571228027\u001b[0m\n", - "\u001b[37mDEBUG:lightwood-91181:Loss @ epoch 15: 0.20496906340122223\u001b[0m\n", - "\u001b[37mDEBUG:lightwood-91181:Loss @ epoch 16: 0.19059491157531738\u001b[0m\n", - "\u001b[37mDEBUG:lightwood-91181:Loss @ epoch 17: 0.17612512409687042\u001b[0m\n", - "\u001b[37mDEBUG:lightwood-91181:Loss @ epoch 18: 0.161383256316185\u001b[0m\n", - "\u001b[37mDEBUG:lightwood-91181:Loss @ epoch 19: 0.12839828431606293\u001b[0m\n", - "\u001b[37mDEBUG:lightwood-91181:Loss @ epoch 20: 0.1162123903632164\u001b[0m\n", - "\u001b[37mDEBUG:lightwood-91181:Loss @ epoch 21: 0.10669219493865967\u001b[0m\n", - "\u001b[37mDEBUG:lightwood-91181:Loss @ epoch 22: 0.09954904764890671\u001b[0m\n", - "\u001b[37mDEBUG:lightwood-91181:Loss @ epoch 23: 0.09420691430568695\u001b[0m\n", - "\u001b[37mDEBUG:lightwood-91181:Loss @ epoch 24: 0.0900391936302185\u001b[0m\n", - "\u001b[37mDEBUG:lightwood-91181:Loss @ epoch 25: 0.08349908888339996\u001b[0m\n", - "\u001b[37mDEBUG:lightwood-91181:Loss @ epoch 26: 0.0822099968791008\u001b[0m\n", - "\u001b[37mDEBUG:lightwood-91181:Loss @ epoch 27: 
0.08120812475681305\u001b[0m\n", - "\u001b[37mDEBUG:lightwood-91181:Loss @ epoch 28: 0.0804857686161995\u001b[0m\n", - "\u001b[37mDEBUG:lightwood-91181:Loss @ epoch 29: 0.07996372133493423\u001b[0m\n", - "\u001b[37mDEBUG:lightwood-91181:Loss @ epoch 30: 0.07936403155326843\u001b[0m\n", - "\u001b[37mDEBUG:lightwood-91181:Loss @ epoch 31: 0.07869081199169159\u001b[0m\n", - "\u001b[37mDEBUG:lightwood-91181:Loss @ epoch 32: 0.07849359512329102\u001b[0m\n", - "\u001b[37mDEBUG:lightwood-91181:Loss @ epoch 33: 0.07820077985525131\u001b[0m\n", - "\u001b[37mDEBUG:lightwood-91181:Loss @ epoch 34: 0.07790301740169525\u001b[0m\n", - "\u001b[37mDEBUG:lightwood-91181:Loss @ epoch 35: 0.07746117562055588\u001b[0m\n", - "\u001b[37mDEBUG:lightwood-91181:Loss @ epoch 36: 0.0766073539853096\u001b[0m\n", - "\u001b[37mDEBUG:lightwood-91181:Loss @ epoch 37: 0.07440945506095886\u001b[0m\n", - "\u001b[37mDEBUG:lightwood-91181:Loss @ epoch 38: 0.07304742932319641\u001b[0m\n", - "\u001b[37mDEBUG:lightwood-91181:Loss @ epoch 39: 0.07175709307193756\u001b[0m\n", - "\u001b[37mDEBUG:lightwood-91181:Loss @ epoch 40: 0.0706694945693016\u001b[0m\n", - "\u001b[37mDEBUG:lightwood-91181:Loss @ epoch 41: 0.06960804760456085\u001b[0m\n", - "\u001b[37mDEBUG:lightwood-91181:Loss @ epoch 42: 0.0683063194155693\u001b[0m\n", - "\u001b[37mDEBUG:lightwood-91181:Loss @ epoch 43: 0.06553898006677628\u001b[0m\n", - "\u001b[37mDEBUG:lightwood-91181:Loss @ epoch 44: 0.06447519361972809\u001b[0m\n", - "\u001b[37mDEBUG:lightwood-91181:Loss @ epoch 45: 0.06355087459087372\u001b[0m\n", - "\u001b[37mDEBUG:lightwood-91181:Loss @ epoch 46: 0.06285689026117325\u001b[0m\n", - "\u001b[37mDEBUG:lightwood-91181:Loss @ epoch 47: 0.0621829479932785\u001b[0m\n", - "\u001b[37mDEBUG:lightwood-91181:Loss @ epoch 48: 0.06127836927771568\u001b[0m\n", - "\u001b[37mDEBUG:lightwood-91181:Loss @ epoch 49: 0.05949181318283081\u001b[0m\n", - "\u001b[37mDEBUG:lightwood-91181:Loss @ epoch 50: 0.058798886835575104\u001b[0m\n", - "\u001b[37mDEBUG:lightwood-91181:Loss @ epoch 51: 0.058218929916620255\u001b[0m\n", - "\u001b[37mDEBUG:lightwood-91181:Loss @ epoch 52: 0.057854749262332916\u001b[0m\n", - "\u001b[37mDEBUG:lightwood-91181:Loss @ epoch 53: 0.05746406316757202\u001b[0m\n", - "\u001b[37mDEBUG:lightwood-91181:Loss @ epoch 54: 0.056835610419511795\u001b[0m\n", - "\u001b[37mDEBUG:lightwood-91181:Loss @ epoch 55: 0.05569766089320183\u001b[0m\n", - "\u001b[37mDEBUG:lightwood-91181:Loss @ epoch 56: 0.05525219812989235\u001b[0m\n", - "\u001b[37mDEBUG:lightwood-91181:Loss @ epoch 57: 0.05490746721625328\u001b[0m\n", - "\u001b[37mDEBUG:lightwood-91181:Loss @ epoch 58: 0.054767243564128876\u001b[0m\n", - "\u001b[37mDEBUG:lightwood-91181:Loss @ epoch 59: 0.05455196276307106\u001b[0m\n", - "\u001b[37mDEBUG:lightwood-91181:Loss @ epoch 60: 0.0540977418422699\u001b[0m\n", - "\u001b[37mDEBUG:lightwood-91181:Loss @ epoch 61: 0.05336076393723488\u001b[0m\n", - "\u001b[37mDEBUG:lightwood-91181:Loss @ epoch 62: 0.053060129284858704\u001b[0m\n", - "\u001b[37mDEBUG:lightwood-91181:Loss @ epoch 63: 0.05285469442605972\u001b[0m\n", - "\u001b[37mDEBUG:lightwood-91181:Loss @ epoch 64: 0.0528554692864418\u001b[0m\n", - "\u001b[37mDEBUG:lightwood-91181:Loss @ epoch 65: 0.05273965373635292\u001b[0m\n", - "\u001b[37mDEBUG:lightwood-91181:Loss @ epoch 66: 0.05239948257803917\u001b[0m\n", - "\u001b[37mDEBUG:lightwood-91181:Loss @ epoch 67: 0.05194811150431633\u001b[0m\n", - "\u001b[37mDEBUG:lightwood-91181:Loss @ epoch 68: 0.05178629234433174\u001b[0m\n", - 
"\u001b[37mDEBUG:lightwood-91181:Loss @ epoch 69: 0.05171119421720505\u001b[0m\n", - "\u001b[37mDEBUG:lightwood-91181:Loss @ epoch 70: 0.05184203386306763\u001b[0m\n", - "\u001b[37mDEBUG:lightwood-91181:Loss @ epoch 71: 0.05181184783577919\u001b[0m\n" - ] - }, - { - "name": "stderr", - "output_type": "stream", - "text": [ - "\u001b[37mDEBUG:lightwood-91181:Loss @ epoch 72: 0.05157444253563881\u001b[0m\n", - "\u001b[37mDEBUG:lightwood-91181:Loss @ epoch 73: 0.05137106031179428\u001b[0m\n", - "\u001b[37mDEBUG:lightwood-91181:Loss @ epoch 74: 0.05131785199046135\u001b[0m\n", - "\u001b[37mDEBUG:lightwood-91181:Loss @ epoch 75: 0.05133713781833649\u001b[0m\n", - "\u001b[37mDEBUG:lightwood-91181:Loss @ epoch 76: 0.05156172439455986\u001b[0m\n", - "\u001b[32mINFO:lightwood-91181:Ensembling the mixer\u001b[0m\n", - "\u001b[32mINFO:lightwood-91181:Mixer: Neural got accuracy: 0.5960601553597429\u001b[0m\n", - "\u001b[32mINFO:lightwood-91181:Picked best mixer: Neural\u001b[0m\n", - "\u001b[32mINFO:lightwood-91181:Analyzing the ensemble of mixers\u001b[0m\n", - "\u001b[32mINFO:lightwood-91181:The block ICP is now running its analyze() method\u001b[0m\n", - "\u001b[32mINFO:lightwood-91181:The block AccStats is now running its analyze() method\u001b[0m\n", - "\u001b[32mINFO:lightwood-91181:The block GlobalFeatureImportance is now running its analyze() method\u001b[0m\n", - "\u001b[32mINFO:lightwood-91181:Adjustment on validation requested.\u001b[0m\n", - "\u001b[32mINFO:lightwood-91181:Updating the mixers\u001b[0m\n", - "torch.cuda.amp.GradScaler is enabled, but CUDA is not available. Disabling.\n", - "\u001b[37mDEBUG:lightwood-91181:Loss @ epoch 1: 0.06892643496394157\u001b[0m\n", - "\u001b[37mDEBUG:lightwood-91181:Loss @ epoch 2: 0.06978078782558442\u001b[0m\n", - "\u001b[37mDEBUG:lightwood-91181:Loss @ epoch 3: 0.06783530339598656\u001b[0m\n", - "\u001b[37mDEBUG:lightwood-91181:Loss @ epoch 4: 0.07201590612530709\u001b[0m\n", - "\u001b[37mDEBUG:lightwood-91181:Loss @ epoch 5: 0.0718848429620266\u001b[0m\n" - ] - } - ], - "source": [ - "# Define predictive task and predictor\n", - "target = 'concrete_strength'\n", - "pdef = ProblemDefinition.from_dict({'target': target, 'time_aim': 200})\n", - "jai = json_ai_from_problem(df, pdef)\n", - "\n", - "# We will keep the architecture simple: a single neural mixer, and a `BestOf` ensemble:\n", - "jai.outputs[target].mixers = [{\n", - " \"module\": \"Neural\",\n", - " \"args\": {\n", - " \"fit_on_dev\": False,\n", - " \"stop_after\": \"$problem_definition.seconds_per_mixer\",\n", - " \"search_hyperparameters\": False,\n", - " }\n", - "}]\n", - "\n", - "jai.outputs[target].ensemble = {\n", - " \"module\": \"BestOf\",\n", - " \"args\": {\n", - " \"args\": \"$pred_args\",\n", - " \"accuracy_functions\": \"$accuracy_functions\",\n", - " }\n", - "}\n", - "\n", - "# Build and train the predictor\n", - "predictor = predictor_from_json_ai(jai)\n", - "predictor.learn(train_df)" - ] - }, - { - "cell_type": "code", - "execution_count": 6, - "metadata": {}, - "outputs": [ - { - "name": "stderr", - "output_type": "stream", - "text": [ - "\u001b[32mINFO:lightwood-91181:Dropping features: []\u001b[0m\n", - "\u001b[32mINFO:lightwood-91181:Cleaning the data\u001b[0m\n", - "\u001b[32mINFO:lightwood-91181:Featurizing the data\u001b[0m\n", - "\u001b[32mINFO:lightwood-91181:The block ICP is now running its explain() method\u001b[0m\n", - "\u001b[32mINFO:lightwood-91181:The block AccStats is now running its explain() method\u001b[0m\n", - 
"\u001b[32mINFO:lightwood-91181:AccStats.explain() has not been implemented, no modifications will be done to the data insights.\u001b[0m\n", - "\u001b[32mINFO:lightwood-91181:The block GlobalFeatureImportance is now running its explain() method\u001b[0m\n", - "\u001b[32mINFO:lightwood-91181:GlobalFeatureImportance.explain() has not been implemented, no modifications will be done to the data insights.\u001b[0m\n" - ] - }, - { - "data": { - "text/html": [ - "
\n", - "\n", - "\n", - " \n", - " \n", - " \n", - " \n", - " \n", - " \n", - " \n", - " \n", - " \n", - " \n", - " \n", - " \n", - " \n", - " \n", - " \n", - " \n", - " \n", - " \n", - " \n", - " \n", - " \n", - " \n", - " \n", - " \n", - " \n", - " \n", - " \n", - " \n", - " \n", - " \n", - " \n", - " \n", - " \n", - " \n", - " \n", - " \n", - " \n", - " \n", - " \n", - " \n", - " \n", - " \n", - " \n", - " \n", - " \n", - " \n", - " \n", - " \n", - " \n", - " \n", - " \n", - " \n", - " \n", - " \n", - " \n", - " \n", - " \n", - " \n", - " \n", - " \n", - " \n", - " \n", - " \n", - " \n", - " \n", - " \n", - " \n", - " \n", - " \n", - " \n", - " \n", - " \n", - " \n", - " \n", - " \n", - " \n", - " \n", - " \n", - " \n", - " \n", - " \n", - " \n", - " \n", - " \n", - " \n", - " \n", - " \n", - " \n", - " \n", - " \n", - " \n", - " \n", - " \n", - " \n", - " \n", - " \n", - " \n", - " \n", - " \n", - " \n", - "
predictiontruthconfidencelowerupper
051.19360371.300.999130.54044371.846764
128.50339039.600.99917.85022949.156551
218.35613910.790.99910.00000039.009300
316.0620944.830.99910.00000036.715254
432.62362947.710.999111.97046953.276790
..................
20145.63381140.930.999124.98065066.286972
20241.61320952.820.999120.96004862.266369
20331.29704439.660.999110.64388351.950204
20429.40925813.290.99918.75609750.062418
20537.71213817.840.999117.05897758.365298
\n", - "

206 rows × 5 columns

\n", - "
" - ], - "text/plain": [ - " prediction truth confidence lower upper\n", - "0 51.193603 71.30 0.9991 30.540443 71.846764\n", - "1 28.503390 39.60 0.9991 7.850229 49.156551\n", - "2 18.356139 10.79 0.9991 0.000000 39.009300\n", - "3 16.062094 4.83 0.9991 0.000000 36.715254\n", - "4 32.623629 47.71 0.9991 11.970469 53.276790\n", - ".. ... ... ... ... ...\n", - "201 45.633811 40.93 0.9991 24.980650 66.286972\n", - "202 41.613209 52.82 0.9991 20.960048 62.266369\n", - "203 31.297044 39.66 0.9991 10.643883 51.950204\n", - "204 29.409258 13.29 0.9991 8.756097 50.062418\n", - "205 37.712138 17.84 0.9991 17.058977 58.365298\n", - "\n", - "[206 rows x 5 columns]" - ] - }, - "execution_count": 6, - "metadata": {}, - "output_type": "execute_result" - } - ], - "source": [ - "# Train and get predictions for the held out test set\n", - "predictions = predictor.predict(test_df)\n", - "predictions" - ] - }, - { - "cell_type": "markdown", - "metadata": {}, - "source": [ - "## Updating the predictor\n", - "\n", - "As previously mentioned, you can update any given mixer with a `BaseMixer.partial_fit()` call. If you have multiple mixers and want to update them all at once, you should use `PredictorInterface.adjust()`. \n", - "\n", - "For both of these methods, two encoded datasources are needed as input (for `adjust` you need to wrap them in a dictionary with 'old' and 'new' keys). \n", - "\n", - "Let's `adjust` our predictor:" - ] - }, - { - "cell_type": "code", - "execution_count": 8, - "metadata": {}, - "outputs": [ - { - "name": "stderr", - "output_type": "stream", - "text": [ - "\u001b[32mINFO:lightwood-91181:Updating the mixers\u001b[0m\n", - "torch.cuda.amp.GradScaler is enabled, but CUDA is not available. Disabling.\n", - "\u001b[37mDEBUG:lightwood-91181:Loss @ epoch 1: 0.06545061928530534\u001b[0m\n", - "\u001b[37mDEBUG:lightwood-91181:Loss @ epoch 2: 0.0679960281898578\u001b[0m\n", - "\u001b[37mDEBUG:lightwood-91181:Loss @ epoch 3: 0.07171888339022796\u001b[0m\n", - "\u001b[37mDEBUG:lightwood-91181:Loss @ epoch 4: 0.07307156516859929\u001b[0m\n", - "\u001b[37mDEBUG:lightwood-91181:Loss @ epoch 5: 0.06360626469055812\u001b[0m\n", - "\u001b[37mDEBUG:lightwood-91181:Loss @ epoch 6: 0.06457449619968732\u001b[0m\n", - "\u001b[37mDEBUG:lightwood-91181:Loss @ epoch 7: 0.057915804286797844\u001b[0m\n", - "\u001b[37mDEBUG:lightwood-91181:Loss @ epoch 8: 0.06492673171063264\u001b[0m\n" - ] - } - ], - "source": [ - "from lightwood.data import EncodedDs\n", - "\n", - "train_ds = EncodedDs(predictor.encoders, train_df, target)\n", - "update_ds = EncodedDs(predictor.encoders, update_df, target)\n", - "\n", - "predictor.adjust({'old': train_ds, 'new': update_ds})" - ] - }, - { - "cell_type": "code", - "execution_count": 9, - "metadata": {}, - "outputs": [ - { - "name": "stderr", - "output_type": "stream", - "text": [ - "\u001b[32mINFO:lightwood-91181:Dropping features: []\u001b[0m\n", - "\u001b[32mINFO:lightwood-91181:Cleaning the data\u001b[0m\n", - "\u001b[32mINFO:lightwood-91181:Featurizing the data\u001b[0m\n", - "\u001b[32mINFO:lightwood-91181:The block ICP is now running its explain() method\u001b[0m\n", - "\u001b[32mINFO:lightwood-91181:The block AccStats is now running its explain() method\u001b[0m\n", - "\u001b[32mINFO:lightwood-91181:AccStats.explain() has not been implemented, no modifications will be done to the data insights.\u001b[0m\n", - "\u001b[32mINFO:lightwood-91181:The block GlobalFeatureImportance is now running its explain() method\u001b[0m\n", - 
"\u001b[32mINFO:lightwood-91181:GlobalFeatureImportance.explain() has not been implemented, no modifications will be done to the data insights.\u001b[0m\n" - ] - }, - { - "data": { - "text/html": [ - "
\n", - "\n", - "\n", - " \n", - " \n", - " \n", - " \n", - " \n", - " \n", - " \n", - " \n", - " \n", - " \n", - " \n", - " \n", - " \n", - " \n", - " \n", - " \n", - " \n", - " \n", - " \n", - " \n", - " \n", - " \n", - " \n", - " \n", - " \n", - " \n", - " \n", - " \n", - " \n", - " \n", - " \n", - " \n", - " \n", - " \n", - " \n", - " \n", - " \n", - " \n", - " \n", - " \n", - " \n", - " \n", - " \n", - " \n", - " \n", - " \n", - " \n", - " \n", - " \n", - " \n", - " \n", - " \n", - " \n", - " \n", - " \n", - " \n", - " \n", - " \n", - " \n", - " \n", - " \n", - " \n", - " \n", - " \n", - " \n", - " \n", - " \n", - " \n", - " \n", - " \n", - " \n", - " \n", - " \n", - " \n", - " \n", - " \n", - " \n", - " \n", - " \n", - " \n", - " \n", - " \n", - " \n", - " \n", - " \n", - " \n", - " \n", - " \n", - " \n", - " \n", - " \n", - " \n", - " \n", - " \n", - " \n", - " \n", - " \n", - " \n", - " \n", - " \n", - "
predictiontruthconfidencelowerupper
053.39225371.300.999132.73909374.045414
127.88629239.600.99917.23313248.539453
216.30178810.790.99910.00000036.954948
313.8628274.830.99910.00000034.515988
431.42103547.710.999110.76787552.074196
..................
20142.63103740.930.999121.97787663.284197
20237.50244452.820.999116.84928358.155604
20329.49148739.660.99918.83832650.144647
20428.01357013.290.99917.36041048.666731
20535.33604317.840.999114.68288355.989204
\n", - "

206 rows × 5 columns

\n", - "
" - ], - "text/plain": [ - " prediction truth confidence lower upper\n", - "0 53.392253 71.30 0.9991 32.739093 74.045414\n", - "1 27.886292 39.60 0.9991 7.233132 48.539453\n", - "2 16.301788 10.79 0.9991 0.000000 36.954948\n", - "3 13.862827 4.83 0.9991 0.000000 34.515988\n", - "4 31.421035 47.71 0.9991 10.767875 52.074196\n", - ".. ... ... ... ... ...\n", - "201 42.631037 40.93 0.9991 21.977876 63.284197\n", - "202 37.502444 52.82 0.9991 16.849283 58.155604\n", - "203 29.491487 39.66 0.9991 8.838326 50.144647\n", - "204 28.013570 13.29 0.9991 7.360410 48.666731\n", - "205 35.336043 17.84 0.9991 14.682883 55.989204\n", - "\n", - "[206 rows x 5 columns]" - ] - }, - "execution_count": 9, - "metadata": {}, - "output_type": "execute_result" - } - ], - "source": [ - "new_predictions = predictor.predict(test_df)\n", - "new_predictions" - ] - }, - { - "cell_type": "markdown", - "metadata": {}, - "source": [ - "Nice! Our predictor was updated, and new predictions are looking good. Let's compare the old and new accuracies:" - ] - }, - { - "cell_type": "code", - "execution_count": 10, - "metadata": {}, - "outputs": [ - { - "name": "stdout", - "output_type": "stream", - "text": [ - "Old Accuracy: 0.583\n", - "New Accuracy: 0.624\n" - ] - } - ], - "source": [ - "from sklearn.metrics import r2_score\n", - "\n", - "old_acc = r2_score(predictions['truth'], predictions['prediction'])\n", - "new_acc = r2_score(new_predictions['truth'], new_predictions['prediction'])\n", - "\n", - "print(f'Old Accuracy: {round(old_acc, 3)}\\nNew Accuracy: {round(new_acc, 3)}')" - ] - }, - { - "cell_type": "markdown", - "metadata": {}, - "source": [ - "After updating, we see an increase in the R2 score of predictions for the held out test set.\n", - "\n", - "## Conclusion\n", - "\n", - "We have gone through a simple example of how Lightwood predictors can leverage newly acquired data to improve their predictions. The interface for doing so is fairly simple, requiring only some new data and a single call to update.\n", - "\n", - "You can further customize the logic for updating your mixers by modifying the `partial_fit()` methods in them." 
- ] - } - ], - "metadata": { - "kernelspec": { - "display_name": "mdb", - "language": "python", - "name": "mdb" - }, - "language_info": { - "codemirror_mode": { - "name": "ipython", - "version": 3 - }, - "file_extension": ".py", - "mimetype": "text/x-python", - "name": "python", - "nbconvert_exporter": "python", - "pygments_lexer": "ipython3", - "version": "3.8.6" - } - }, - "nbformat": 4, - "nbformat_minor": 4 -} diff --git a/docssrc/README.md b/docssrc/README.md index 05830163e..19fa7e8e1 100644 --- a/docssrc/README.md +++ b/docssrc/README.md @@ -1,16 +1,12 @@ ## Compiling the docs -`pip3 install 'Sphinx==4.1.2' 'sphinx-autoapi==1.8.4' 'sphinx-autodoc-typehints==1.12.0' 'sphinx-code-include==1.1.1' 'sphinx-rtd-theme==0.5.2' 'sphinxcontrib-applehelp==1.0.2' 'sphinxcontrib-devhelp==1.0.2' 'sphinxcontrib-htmlhelp==2.0.0' 'sphinxcontrib-jsmath==1.0.1' 'sphinxcontrib-napoleon==0.7' 'sphinxcontrib-qthelp==1.0.3' 'sphinxcontrib-serializinghtml==1.1.5' autoapi nbsphinx myst_parser` -`cd docssrc` -`make github` -`cd ../docs && python3 -m http.server` -Should now be available at: 0.0.0.0:8000 +- Make sure you are in `docssrc`, then follow the instructions under `run` in our [documentation building github actions job](https://github.com/mindsdb/lightwood/blob/staging/.github/workflows/doc_build.yml#L21) +- Then go into the newly built docs and start a server to see them: `cd ../docs && python3 -m http.server` +- Should now be available at: 0.0.0.0:8000 | Alternatively, you can just open the `index.html` with a browser and that should work too +## Ref -## Creating the docs -*They are already created, you shouldn't have to do this unless you are restarting from scratch* +For how autosummary works: https://stackoverflow.com/questions/2701998/sphinx-autodoc-is-not-automatic-enough -First, make a new directory (should exist) named `docs`. +## Manual steps -Within `docssrc`, run sphinx-quickstart (https://www.sphinx-doc.org/en/master/usage/quickstart.html). - -I opted to separate source/build directories. This allows, in the long run, simplicity between code + build. +Currently, notebooks have to be built manually using: `find . -iname '*.ipynb' -exec jupyter nbconvert --to notebook --inplace --execute {} \;` \ No newline at end of file diff --git a/docssrc/compile_docs.sh b/docssrc/compile_docs.sh deleted file mode 100755 index 3235c1f7c..000000000 --- a/docssrc/compile_docs.sh +++ /dev/null @@ -1,10 +0,0 @@ -#!/usr/bin/env bash - -# 2021.09.14 - -# Build HTML files; run this on the source directory -# Of the form: sphinx-build -b -sphinx-build -b html source build - -# TODO: Hack to move static folders - this should be fixed (NS) -cp -r source/tutorials build \ No newline at end of file diff --git a/docssrc/make.bat b/docssrc/make.bat deleted file mode 100644 index 6247f7e23..000000000 --- a/docssrc/make.bat +++ /dev/null @@ -1,35 +0,0 @@ -@ECHO OFF - -pushd %~dp0 - -REM Command file for Sphinx documentation - -if "%SPHINXBUILD%" == "" ( - set SPHINXBUILD=sphinx-build -) -set SOURCEDIR=source -set BUILDDIR=build - -if "%1" == "" goto help - -%SPHINXBUILD% >NUL 2>NUL -if errorlevel 9009 ( - echo. - echo.The 'sphinx-build' command was not found. Make sure you have Sphinx - echo.installed, then set the SPHINXBUILD environment variable to point - echo.to the full path of the 'sphinx-build' executable. Alternatively you - echo.may add the Sphinx directory to PATH. - echo. 
- echo.If you don't have Sphinx installed, grab it from - echo.http://sphinx-doc.org/ - exit /b 1 -) - -%SPHINXBUILD% -M %1 %SOURCEDIR% %BUILDDIR% %SPHINXOPTS% %O% -goto end - -:help -%SPHINXBUILD% -M help %SOURCEDIR% %BUILDDIR% %SPHINXOPTS% %O% - -:end -popd diff --git a/docssrc/notes.txt b/docssrc/notes.txt deleted file mode 100644 index fcbca275c..000000000 --- a/docssrc/notes.txt +++ /dev/null @@ -1,32 +0,0 @@ -#2021.09.06 - -Lightwood documentation: -(1) https://www.sphinx-doc.org/en/master/tutorial/getting-started.html#setting-up-your-project-and-development-environment - - -https://www.sphinx-doc.org/en/master/tutorial/first-steps.html - -index.rst has the documentation setup for the main page - -I had to add myst_parser into the conf.py file for Markdown capabilities - -Included: - -napoleon - - pip install sphinx-code-include - -Also included -pip install sphinx-autoapi - -In source run: -sphinx-build -b html . _build -https://sphinx-autoapi.readthedocs.io/en/latest/tutorials.html - -ask dlewis with seto + alejo on the CSS parts -dfraser can give design templates for help on this - -** give dlewis, dfraser, alejo, seto, zoran - give a timeline on when we'll get this done - -action item - engineering chanel (dfraser - may need help with the design etc) -dlewis with the CSS etc - -action item - documentation in this format; how do generate the steps - automate and put it on a website \ No newline at end of file diff --git a/docssrc/source/_static/custom.css b/docssrc/source/_static/custom.css index d59f0ee7c..f2e929884 100644 --- a/docssrc/source/_static/custom.css +++ b/docssrc/source/_static/custom.css @@ -10,6 +10,99 @@ /* MindsDB --white: #ffffff; */ /* MindsDB --slate-grey: #5d6970; */ +/* +.wy-nav-side { + font-size: 1.6em; + font-weight: 500; +} +*/ + +.wy-nav-side .toctree-l1 { + font-size: 1.5em; + font-weight: 500; +} + +.wy-nav-side .toctree-l2 { + font-size: 0.7em !important; + font-weight: 400 !important; +} + +.wy-nav-side .toctree-l3 { + font-size: 0.9em !important; + font-weight: 400 !important; +} + +/* .rst-content dl.class dt, .rst-content dl.function dt */ + +.field-list dt.field-odd { + font-size: 13px !important; + color:#2c263f !important; + padding-left: 0rem !important; +} + +.field-list dt.field-even { + font-size: 13px !important; + color:#2c263f !important; + padding-left: 0rem !important; +} + +.field-list dd.field-odd { + font-size: 13px !important; + color:#2c263f !important; + margin-left: 12px !important; +} + +.field-list dd.field-even { + font-size: 13px !important; + color:#2c263f !important; + margin-left: 12px !important; +} + + +.sig { + background:rgba(254, 220, 140, 0.3) !important; + border-top: solid 0px #2c263f !important; + border-left: 0px !important; + padding-left: 8px; + padding-right: 6px; + padding-top: 6px; + padding-bottom: 6px; +} + +.sig .sig-prename { + color: #2c263f; +} + +.sig .sig-name { + color: #2c263f; +} + +.sig .sig-paren { + color: rgb(93, 105, 112); +} +.sig .sig-param { + color: rgb(93, 105, 112); +} + +.sig .property { + color: rgb(93, 105, 112); +} + +div.rst-content a { + color: #00b06d; + text-decoration: none; +} + +div.rst-content a:visited { + color: #00b06d; +} + +a:hover { + color: #00b06d !important; + text-decoration: underline; +} + +/* body { font-family: 'PT Sans', Helvetica, Arial, 'sans-serif'; font-size: 17px; @@ -27,48 +120,35 @@ div.sphinxsidebar p { color: #2c263f; } -/* Home MDAnalysis colour */ +// Home MDAnalysis colour .wy-side-nav-search > a { color: #343131; } -/* Side MDAnalysis version 
colour */ +// Side MDAnalysis version colour .wy-side-nav-search > div.version { color: #2c263f; } -/* Menubar caption colour */ +// Menubar caption colour div.wy-menu-vertical span.caption-text { color: #00b06d; } -/* Mobile layout menubar option */ +// Mobile layout menubar option nav.wy-nav-top { background: #343131; } -/* Menu search bar outline (default blue) */ +// Menu search bar outline (default blue) .wy-side-nav-search input[type="text"] { border-color: #2c263f; } -/* -- body styles --------------------------------------------------------- */ - -/* Different coloured links for sidebar vs body) */ -div.rst-content a { - color: #00b06d; - text-decoration: none; -} +// -- body styles --------------------------------------------------------- -div.rst-content a:visited { - color: #00b06d; -} - -a:hover { - color: #00b06d !important; - text-decoration: underline; -} +// Different coloured links for sidebar vs body) pre, tt, code { @@ -92,7 +172,7 @@ a.headerlink:hover { color: #fff; } -/* ------- admonition boxes ------- */ +// ------- admonition boxes ------- div.admonition { margin: 10px 0px; @@ -104,15 +184,16 @@ div.admonition p.admonition-title { font-weight: bolder; } -/* ----- Tables ----- */ +// ----- Tables ----- + +// override table width restrictions +// wrap tables instead of scrolling -/* override table width restrictions */ -/* wrap tables instead of scrolling */ @media screen and (min-width: 767px) { .wy-table-responsive table td, .wy-table-responsive table th { - /* !important prevents the common CSS stylesheets from overriding - this as on RTD they are loaded after this stylesheet */ + // !important prevents the common CSS stylesheets from overriding + this as on RTD they are loaded after this stylesheet white-space: normal !important; } @@ -122,7 +203,7 @@ div.admonition p.admonition-title { } } -/* ----- Field lists ------ */ +// ----- Field lists ------ .section > dl.field-list { display: flex; @@ -157,13 +238,7 @@ dl.field-list > dt::after { margin: 0; } -/* ----- MDAnalysis coloured elements ------ */ - -.rst-content dl.class dt, .rst-content dl.function dt { - color: #ca6500; - background: #FFEBD0; - border-top: solid 3px #00b06d; -} +// ----- MDAnalysis coloured elements ------ .rst-content .viewcode-link, .rst-content .viewcode-back { color: #2c263f; @@ -191,9 +266,7 @@ dl.field-list > dt::after { background: #FFEEED; } -.rst-content .caution p.admonition-title, .rst-content .note p.admonition-title, .rst-content .important p.admonition-title { - background: #00b06d; -} + .rst-content .caution, .rst-content .note, .rst-content .important { background: #FFEBD0; @@ -202,3 +275,15 @@ dl.field-list > dt::after { .rst-content code:not(.xref).literal { color: #ca6500; } + +.rst-content .caution p.admonition-title, .rst-content .note p.admonition-title, .rst-content .important p.admonition-title { + background: #00b06d; +} + +.rst-content dl.class dt, .rst-content dl.function dt { + color: #ca6500; + background: #FFEBD0; + border-top: solid 3px #00b06d; +} + +*/ diff --git a/docssrc/source/conf.py b/docssrc/source/conf.py index 978724232..10a56400c 100644 --- a/docssrc/source/conf.py +++ b/docssrc/source/conf.py @@ -118,17 +118,8 @@ # relative to this directory. They are copied after the builtin static files, # so a file named "default.css" will overwrite the builtin "default.css". 
html_static_path = ['_static'] -html_css_files = ['_static/custom.css'] +html_css_files = ['custom.css'] -html_context = { - 'css_files': [ - '_static/custom.css', # overrides for wide tables in RTD theme - ]} - -html_theme_path = [ - # msmb_theme.get_html_theme_path(), - sphinx_rtd_theme.get_html_theme_path() -] # Brand logo html_logo = "_static/logos/mindsdblogo.png" @@ -146,7 +137,7 @@ # Autodoc capability # ----------------- # autoapi_template_dir = '_autoapi_templates' -autoapi_root = 'docs/api' +autoapi_root = 'docs' autoapi_generate_api_docs = False autoapi_dirs = ['../../lightwood'] diff --git a/docssrc/source/data.rst b/docssrc/source/data.rst index 3ea9748d2..2a8ebacf4 100644 --- a/docssrc/source/data.rst +++ b/docssrc/source/data.rst @@ -4,4 +4,5 @@ The focus of these modules is on storing, transforming, cleaning, splitting, merging, getting and removing data. .. automodule:: data - :members: \ No newline at end of file + :members: + :show-inheritance: \ No newline at end of file diff --git a/docssrc/source/data/cleaner.rst b/docssrc/source/data/cleaner.rst deleted file mode 100644 index 09efa5de3..000000000 --- a/docssrc/source/data/cleaner.rst +++ /dev/null @@ -1,5 +0,0 @@ -Data Cleaning --------------------- - -.. automodule:: data.cleaner - :members: \ No newline at end of file diff --git a/docssrc/source/index.rst b/docssrc/source/index.rst index e6ac21efb..e0f5324bd 100644 --- a/docssrc/source/index.rst +++ b/docssrc/source/index.rst @@ -189,47 +189,6 @@ To get updates on Lightwood and MindsDB’s latest announcements, releases, and Join our mission of democratizing machine learning and allowing developers to become data scientists! - -Hacktoberfest 2021 -======================= - -We are very excited that Lightwood is participating in this year's Hacktoberfest 2021 event. This month-long event through October gives you the chance to contribute to the Open Source codebase of Lightwood and MindsDB! - -The Lightwood core team has prepared several issues of different types that are ideal for first-time contributors and will be posted throughout the month. It's entirely up to you what you choose to work on and if you have your own great idea, feel free to suggest it by reaching out to us via our Slack community or by posting an issue with the `discussion` tag. - -**Our Major Incentive and SWAG!** - -Make contributions and enter into the draw for a `Deep Learning Laptop `_ **powered by the NVIDIA RTX 3080 Max-Q GPU**. Pre-installed with TensorFlow, PyTorch, CUDA, cuDNN and more. - -.. image:: _static/logos/laptop.jpeg - :align: center - :alt: Tensorbook by Lambda Labs - :width: 455 - :height: 400 - -Also, we’d love to send you a special MindsDB SWAG gift pack: - -.. image:: _static/logos/swag.png - :align: center - :alt: MindsDB Swag - -Please make sure to read the :ref:`contributions-guidelines ` first! - -How to participate -^^^^^^^^^^^^^^^^^^^^^^^^^^^ - -1. Contribute by making pull requests to any of our open issues labeled with the `hacktoberfest` tag during October. All hacktoberfest issues will specify how many points a successfully merged PR is worth. -2. Have a total score of at least 5 points in order to enter the big prize draw. -3. Complete the form with links to all your completed PR’s so we know where to ship the gift pack to! - -Entries close at midnight (PST) Sunday, 31 October 2021 with the prize draw winner announced at an online event on Monday, 1st of November. - - -Please check `MindsDB's hacktoberfest website `_ for more details. - -.. 
note:: if you wish to contribute with something that is *not currently flagged* as a hacktoberfest issue, make an issue (or make a comment if an issue already exists), and let one of the core Lightwood team researchers approve it. - - Contributor Code of Conduct ^^^^^^^^^^^^^^^^^^^^^^^^^^^ Please note that this project is released with a `Contributor Code of Conduct `_. By participating in this project, you agree to abide by its terms. @@ -264,14 +223,9 @@ License Other Links ======================= .. toctree:: - :maxdepth: 1 + :maxdepth: 8 lightwood_philosophy tutorials api - data - encoder - mixer - ensemble - analysis - helpers \ No newline at end of file + data \ No newline at end of file diff --git a/docssrc/source/mixer/helpers/helpers.rst b/docssrc/source/mixer/helpers/helpers.rst new file mode 100644 index 000000000..421ffbf89 --- /dev/null +++ b/docssrc/source/mixer/helpers/helpers.rst @@ -0,0 +1,4 @@ +:mod:`Mixers.Helpers` + +========================== +.. automodule:: mixer.helpers.ar_net \ No newline at end of file diff --git a/docssrc/source/mixer.rst b/docssrc/source/mixer/mixer.rst similarity index 74% rename from docssrc/source/mixer.rst rename to docssrc/source/mixer/mixer.rst index b1c56f273..5f50168e6 100644 --- a/docssrc/source/mixer.rst +++ b/docssrc/source/mixer/mixer.rst @@ -4,4 +4,6 @@ Machine learning models which learn to predict the target value using the encoded representations. .. automodule:: mixer - :members: + :members: + :undoc-members: + :show-inheritance: diff --git a/lightwood/__about__.py b/lightwood/__about__.py index 42c620d8c..34147221a 100755 --- a/lightwood/__about__.py +++ b/lightwood/__about__.py @@ -1,6 +1,6 @@ __title__ = 'lightwood' __package_name__ = 'lightwood' -__version__ = '1.6.1' +__version__ = '1.7.0' __description__ = "Lightwood is a toolkit for automatic machine learning model building" __email__ = "community@mindsdb.com" __author__ = 'MindsDB Inc' diff --git a/lightwood/api/json_ai.py b/lightwood/api/json_ai.py index 730a650ca..e301e154d 100644 --- a/lightwood/api/json_ai.py +++ b/lightwood/api/json_ai.py @@ -80,29 +80,29 @@ def lookup_encoder( tss = problem_defintion.timeseries_settings encoder_lookup = { - dtype.integer: "Integer.NumericEncoder", - dtype.float: "Float.NumericEncoder", - dtype.binary: "Binary.BinaryEncoder", - dtype.categorical: "Categorical.CategoricalAutoEncoder" + dtype.integer: "NumericEncoder", + dtype.float: "NumericEncoder", + dtype.binary: "BinaryEncoder", + dtype.categorical: "CategoricalAutoEncoder" if statistical_analysis is None or len(statistical_analysis.histograms[col_name]) > 100 - else "Categorical.OneHotEncoder", - dtype.tags: "Tags.MultiHotEncoder", - dtype.date: "Date.DatetimeEncoder", - dtype.datetime: "Datetime.DatetimeEncoder", - dtype.image: "Image.Img2VecEncoder", - dtype.rich_text: "Rich_Text.PretrainedLangEncoder", - dtype.short_text: "Short_Text.CategoricalAutoEncoder", - dtype.array: "Array.ArrayEncoder", - dtype.tsarray: "TimeSeries.TimeSeriesEncoder", - dtype.quantity: "Quantity.NumericEncoder", - dtype.audio: "Audio.MFCCEncoder" + else "OneHotEncoder", + dtype.tags: "MultiHotEncoder", + dtype.date: "DatetimeEncoder", + dtype.datetime: "DatetimeEncoder", + dtype.image: "Img2VecEncoder", + dtype.rich_text: "PretrainedLangEncoder", + dtype.short_text: "CategoricalAutoEncoder", + dtype.array: "ArrayEncoder", + dtype.tsarray: "TimeSeriesEncoder", + dtype.quantity: "NumericEncoder", + dtype.audio: "MFCCEncoder" } # If column is a target, only specific feature representations are allowed 
that enable supervised tasks target_encoder_lookup_override = { - dtype.rich_text: "Rich_Text.VocabularyEncoder", - dtype.categorical: "Categorical.OneHotEncoder", + dtype.rich_text: "VocabularyEncoder", + dtype.categorical: "OneHotEncoder", } # Assign a default encoder to each column. @@ -118,8 +118,12 @@ def lookup_encoder( if col_dtype in (dtype.categorical, dtype.binary): if problem_defintion.unbias_target: encoder_dict["args"][ - "target_class_distribution" + "target_weights" ] = "$statistical_analysis.target_class_distribution" + if problem_defintion.target_weights is not None: + encoder_dict["args"][ + "target_weights" + ] = problem_defintion.target_weights if col_dtype in (dtype.integer, dtype.float, dtype.array, dtype.tsarray): encoder_dict["args"][ @@ -130,7 +134,7 @@ def lookup_encoder( if tss.is_timeseries: gby = tss.group_by if tss.group_by is not None else [] if col_name in tss.order_by + tss.historical_columns: - encoder_dict["module"] = col_dtype.capitalize() + ".TimeSeriesEncoder" + encoder_dict["module"] = "TimeSeriesEncoder" encoder_dict["args"]["original_type"] = f'"{col_dtype}"' encoder_dict["args"]["target"] = "self.target" encoder_dict["args"]["grouped_by"] = f"{gby}" @@ -138,24 +142,24 @@ def lookup_encoder( if is_target: if col_dtype in [dtype.integer]: encoder_dict["args"]["grouped_by"] = f"{gby}" - encoder_dict["module"] = "Integer.TsNumericEncoder" + encoder_dict["module"] = "TsNumericEncoder" if col_dtype in [dtype.float]: encoder_dict["args"]["grouped_by"] = f"{gby}" - encoder_dict["module"] = "Float.TsNumericEncoder" + encoder_dict["module"] = "TsNumericEncoder" if tss.nr_predictions > 1: encoder_dict["args"]["grouped_by"] = f"{gby}" encoder_dict["args"]["timesteps"] = f"{tss.nr_predictions}" - encoder_dict["module"] = "TimeSeries.TsArrayNumericEncoder" + encoder_dict["module"] = "TsArrayNumericEncoder" if "__mdb_ts_previous" in col_name: - encoder_dict["module"] = "Array.ArrayEncoder" + encoder_dict["module"] = "ArrayEncoder" encoder_dict["args"]["original_type"] = f'"{tss.target_type}"' encoder_dict["args"]["window"] = f"{tss.window}" # Set arguments for the encoder - if encoder_dict["module"] == "Rich_Text.PretrainedLangEncoder" and not is_target: + if encoder_dict["module"] == "PretrainedLangEncoder" and not is_target: encoder_dict["args"]["output_type"] = "$dtype_dict[$target]" - if eval(encoder_dict["module"].split(".")[1]).is_trainable_encoder: + if eval(encoder_dict["module"]).is_trainable_encoder: encoder_dict["args"]["stop_after"] = "$problem_definition.seconds_per_encoder" if is_target_predicting_encoder: @@ -310,7 +314,7 @@ def generate_json_ai( if ( tss.is_timeseries - and eval(encoder["module"].split(".")[1]).is_timeseries_encoder + and eval(encoder["module"]).is_timeseries_encoder ): if tss.group_by is not None: for group in tss.group_by: @@ -383,7 +387,7 @@ def generate_json_ai( [ x for x in features.values() - if eval(x.encoder["module"].split(".")[1]).is_trainable_encoder + if eval(x.encoder["module"]).is_trainable_encoder ] ) nr_mixers = len(list(outputs.values())[0].mixers) @@ -533,6 +537,10 @@ def _add_implicit_values(json_ai: JsonAI) -> JsonAI: mixers[i]["args"]["input_cols"] = mixers[i]["args"].get( "input_cols", "$input_cols" ) + mixers[i]["args"]["target_encoder"] = mixers[i]["args"].get( + "target_encoder", "$encoders[self.target]" + ) + mixers[i]["args"]["use_optuna"] = True elif mixers[i]["module"] == "Regression": mixers[i]["args"]["target"] = mixers[i]["args"].get("target", "$target") mixers[i]["args"]["dtype_dict"] = 
mixers[i]["args"].get( @@ -549,6 +557,9 @@ def _add_implicit_values(json_ai: JsonAI) -> JsonAI: mixers[i]["args"]["input_cols"] = mixers[i]["args"].get( "input_cols", "$input_cols" ) + mixers[i]["args"]["target_encoder"] = mixers[i]["args"].get( + "target_encoder", "$encoders[self.target]" + ) elif mixers[i]["module"] == "SkTime": mixers[i]["args"]["target"] = mixers[i]["args"].get("target", "$target") mixers[i]["args"]["dtype_dict"] = mixers[i]["args"].get( @@ -566,10 +577,6 @@ def _add_implicit_values(json_ai: JsonAI) -> JsonAI: for name in json_ai.features: if json_ai.features[name].dependency is None: json_ai.features[name].dependency = [] - if json_ai.features[name].data_dtype is None: - json_ai.features[name].data_dtype = ( - json_ai.features[name].encoder["module"].split(".")[0].lower() - ) # Add "hidden" fields hidden_fields = { @@ -723,8 +730,9 @@ def code_from_json_ai(json_ai: JsonAI) -> str: ) ) dependency_dict[col_name] = [] - dtype_dict[col_name] = f"""'{list(json_ai.outputs.values())[0].data_dtype}'""" - json_ai.features[col_name] = Feature(encoder=encoder_dict[col_name]) + data_dtype = list(json_ai.outputs.values())[0].data_dtype + dtype_dict[col_name] = f"""'{data_dtype}'""" + json_ai.features[col_name] = Feature(encoder=encoder_dict[col_name], data_dtype=data_dtype) # ----------------- # diff --git a/lightwood/api/types.py b/lightwood/api/types.py index a96b29fac..9f1b98b3d 100644 --- a/lightwood/api/types.py +++ b/lightwood/api/types.py @@ -41,7 +41,7 @@ class Feature: """ encoder: Module - data_dtype: str = None + data_dtype: str dependency: List[str] = None @staticmethod @@ -326,7 +326,7 @@ class ProblemDefinition: unbias_target: bool seconds_per_mixer: Union[int, None] seconds_per_encoder: Union[int, None] - time_aim: Union[int, None] + time_aim: Union[float, None] target_weights: Union[List[float], None] positive_domain: bool timeseries_settings: TimeseriesSettings @@ -355,7 +355,7 @@ def from_dict(obj: Dict): target_weights = obj.get('target_weights', None) positive_domain = obj.get('positive_domain', False) timeseries_settings = TimeseriesSettings.from_dict(obj.get('timeseries_settings', {})) - anomaly_detection = obj.get('anomaly_detection', True) + anomaly_detection = obj.get('anomaly_detection', False) ignore_features = obj.get('ignore_features', []) fit_on_all = obj.get('fit_on_all', True) strict_mode = obj.get('strict_mode', True) diff --git a/lightwood/data/cleaner.py b/lightwood/data/cleaner.py index b51ee8229..23ea78761 100644 --- a/lightwood/data/cleaner.py +++ b/lightwood/data/cleaner.py @@ -289,7 +289,7 @@ def _remove_columns(data: pd.DataFrame, identifiers: Dict[str, object], target: if mode == "predict": if ( target in data.columns - and not timeseries_settings.use_previous_target + and (not timeseries_settings.is_timeseries or not timeseries_settings.use_previous_target) and not anomaly_detection ): data = data.drop(columns=[target]) diff --git a/lightwood/data/encoded_ds.py b/lightwood/data/encoded_ds.py index 5ad6b691e..7e171e8e3 100644 --- a/lightwood/data/encoded_ds.py +++ b/lightwood/data/encoded_ds.py @@ -67,11 +67,15 @@ def __getitem__(self, idx: int) -> Tuple[torch.Tensor, torch.Tensor]: if hasattr(self.encoders[col], 'data_window'): cols = [self.target] + [f'{self.target}_timestep_{i}' for i in range(1, self.encoders[col].data_window)] + data = [self.data_frame[cols].iloc[idx].tolist()] else: cols = [col] + data = self.data_frame[cols].iloc[idx].tolist() - data = self.data_frame[cols].iloc[idx].tolist() encoded_tensor = 
self.encoders[col].encode(data, **kwargs)[0] + if torch.isnan(encoded_tensor).any() or torch.isinf(encoded_tensor).any(): + raise Exception(f'Encoded tensor: {encoded_tensor} contains nan or inf values, this tensor is \ + the encoding of column {col} using {self.encoders[col].__class__}') if col != self.target: X = torch.cat([X, encoded_tensor]) else: @@ -103,6 +107,8 @@ def get_encoded_column_data(self, column_name: str) -> torch.Tensor: deps = [dep for dep in self.encoders[column_name].dependencies if dep in self.data_frame.columns] kwargs['dependency_data'] = {dep: self.data_frame[dep].tolist() for dep in deps} encoded_data = self.encoders[column_name].encode(self.data_frame[column_name], **kwargs) + if torch.isnan(encoded_data).any() or torch.isinf(encoded_data).any(): + raise Exception(f'Encoded tensor: {encoded_data} contains nan or inf values') if not isinstance(encoded_data, torch.Tensor): raise Exception( diff --git a/lightwood/data/timeseries_transform.py b/lightwood/data/timeseries_transform.py index 451c3b0e2..f49d9b046 100644 --- a/lightwood/data/timeseries_transform.py +++ b/lightwood/data/timeseries_transform.py @@ -40,6 +40,13 @@ def transform_timeseries( ob_arr = tss.order_by window = tss.window + if tss.use_previous_target and target not in data.columns: + raise Exception(f"Cannot transform. Missing historical values for target column {target} (`use_previous_target` is set to True).") # noqa + + for hcol in tss.historical_columns: + if hcol not in data.columns or data[hcol].isna().any(): + raise Exception(f"Cannot transform. Missing values in historical column {hcol}.") + if '__mdb_make_predictions' in original_df.columns: index = original_df[original_df['__mdb_make_predictions'].map( {'True': True, 'False': False, True: True, False: False}).isin([True])] @@ -162,7 +169,7 @@ def transform_timeseries( df_gb_list = list(combined_df.groupby(tss.group_by)) df_gb_map = {} for gb, df in df_gb_list: - df_gb_map['_' + '_'.join(gb)] = df + df_gb_map['_' + '_'.join(str(gb))] = df timeseries_row_mapping = {} idx = 0 @@ -190,7 +197,6 @@ def transform_timeseries( del combined_df['original_index'] - # return combined_df, secondary_type_dict, timeseries_row_mapping, df_gb_map return combined_df diff --git a/lightwood/encoder/__init__.py b/lightwood/encoder/__init__.py index da3800775..ab85ddfed 100644 --- a/lightwood/encoder/__init__.py +++ b/lightwood/encoder/__init__.py @@ -16,8 +16,6 @@ from lightwood.encoder.array.array import ArrayEncoder from lightwood.encoder.categorical.multihot import MultiHotEncoder from lightwood.encoder.text.pretrained import PretrainedLangEncoder -from lightwood.encoder.type_encoder_maps import (Array, Binary, Categorical, Date, Datetime, Float, Image, Integer, - TimeSeries, Quantity, Rich_Text, Short_Text, Tags, Audio) from lightwood.encoder.audio import MFCCEncoder @@ -25,6 +23,4 @@ __all__ = ['BaseEncoder', 'DatetimeEncoder', 'Img2VecEncoder', 'NumericEncoder', 'TsNumericEncoder', 'TsArrayNumericEncoder', 'ShortTextEncoder', 'VocabularyEncoder', 'TextRnnEncoder', 'OneHotEncoder', 'CategoricalAutoEncoder', 'TimeSeriesEncoder', 'ArrayEncoder', 'MultiHotEncoder', - 'PretrainedLangEncoder', 'BinaryEncoder', 'DatetimeNormalizerEncoder', 'MFCCEncoder', - 'Array', 'TimeSeries', 'Binary', 'Categorical', 'Date', 'Datetime', 'Float', 'Image', 'Integer', - 'Quantity', 'Rich_Text', 'Short_Text', 'Tags', 'Audio'] + 'PretrainedLangEncoder', 'BinaryEncoder', 'DatetimeNormalizerEncoder', 'MFCCEncoder'] diff --git a/lightwood/encoder/array/array.py 
b/lightwood/encoder/array/array.py index c5b7056ca..a5ecd95df 100644 --- a/lightwood/encoder/array/array.py +++ b/lightwood/encoder/array/array.py @@ -22,7 +22,7 @@ class ArrayEncoder(BaseEncoder): is_trainable_encoder: bool = True - def __init__(self, stop_after: int, window: int = None, is_target: bool = False, original_type: dtype = None): + def __init__(self, stop_after: float, window: int = None, is_target: bool = False, original_type: dtype = None): super().__init__(is_target) self.stop_after = stop_after self.original_type = original_type diff --git a/lightwood/encoder/audio/mfcc.py b/lightwood/encoder/audio/mfcc.py index b5a5dd8f6..4cddc018d 100644 --- a/lightwood/encoder/audio/mfcc.py +++ b/lightwood/encoder/audio/mfcc.py @@ -8,21 +8,40 @@ class MFCCEncoder(BaseEncoder): - """ - Audio encoder. Uses `librosa` to compute the Mel-frequency spectral coefficients (MFCCs) of the audio file. They are a common feature used in speech and audio processing. The features are a 2D array, flattened into a 1D one. - """ # noqa is_trainable_encoder: bool = False def __init__(self, is_target: bool = False): + """ + Audio encoder. + + Uses `librosa` to compute the Mel-frequency spectral coefficients (MFCCs) of the audio file. They are a common feature used in speech and audio processing. Example: https://centaur.reading.ac.uk/88046/3/ESR_for_home_AI.pdf + + The output feature for any given audio file is a 2D array, flattened into a 1D one to comply with the expected format in lightwood mixers. + + Note that this encoder does not have a .decode() method. As such, models that predict audio as output are not supported at this time. + + :param is_target: whether this encoder's column is the target. Should be false as encoder is not bi-directional. + """ # noqa + assert not is_target super().__init__(is_target) def prepare(self, priming_data: pd.Series): + """ + The audio encoder undergoes rule-based processing. Thus, the prepare statement only returns the output dimension size. + + :param priming_data: training data + """ # noqa self.is_prepared = True priming_data = list(priming_data) ele = self.encode([str(priming_data[0])])[0] self.output_size = len(ele) def encode(self, column_data): + """ + Encode a list of audio files. + + :param column_data: list of strings that point to paths or URLs of the audio files that will be encoded. + """ encoded_audio_arr = [] for path in column_data: try: diff --git a/lightwood/encoder/categorical/autoencoder.py b/lightwood/encoder/categorical/autoencoder.py index 5109c4a78..2eef01c3b 100644 --- a/lightwood/encoder/categorical/autoencoder.py +++ b/lightwood/encoder/categorical/autoencoder.py @@ -14,7 +14,7 @@ class CategoricalAutoEncoder(BaseEncoder): is_trainable_encoder: bool = True - def __init__(self, stop_after: int = 3600, is_target: bool = False, max_encoded_length: int = 100): + def __init__(self, stop_after: float = 3600, is_target: bool = False, max_encoded_length: int = 100): super().__init__(is_target) self.is_prepared = False self.name = 'Categorical Autoencoder' diff --git a/lightwood/encoder/categorical/binary.py b/lightwood/encoder/categorical/binary.py index 1c708179f..1aff5fb36 100644 --- a/lightwood/encoder/categorical/binary.py +++ b/lightwood/encoder/categorical/binary.py @@ -6,15 +6,20 @@ # Exists mainly for datasets with loads of binary flags where OHE can be too slow to fit class BinaryEncoder(BaseEncoder): + """ - def __init__(self, is_target=False, target_class_distribution=None): + Why are we handling target weighting inside encoders? 
Simple: we'd otherwise have to compute per-index weighting inside the mixers, rather than having that code unified inside 2x encoders. So moving this to the mixer will still involve having to pass the target encoder to the mixer, but will add the additional complexity of having to pass a weighting map to the mixer and adding class-to-index translation boilerplate + weight setting for each mixer + """ # noqa + def __init__(self, is_target=False, target_weights=None): super().__init__(is_target) self.map = {} self.rev_map = {} self.output_size = 2 + + self.target_weights = None + self.index_weights = None if self.is_target: - self.target_class_distribution = target_class_distribution - self.index_weights = None + self.target_weights = target_weights def prepare(self, priming_data): if self.is_prepared: @@ -32,8 +37,8 @@ def prepare(self, priming_data): if self.is_target: self.index_weights = [None, None] for word in self.map: - if self.target_class_distribution is not None: - self.index_weights[self.map[word]] = 1 / self.target_class_distribution[word] + if self.target_weights is not None: + self.index_weights[self.map[word]] = 1 / self.target_weights[word] else: self.index_weights[self.map[word]] = 1 diff --git a/lightwood/encoder/categorical/onehot.py b/lightwood/encoder/categorical/onehot.py index 7e5bfadcc..d1b7e6032 100644 --- a/lightwood/encoder/categorical/onehot.py +++ b/lightwood/encoder/categorical/onehot.py @@ -10,8 +10,11 @@ class OneHotEncoder(BaseEncoder): + """ - def __init__(self, is_target=False, target_class_distribution=None, handle_unknown='unknown_token'): + Why are we handling target weighting inside encoders? Simple: we'd otherwise have to compute per-index weighting inside the mixers, rather than having that code unified inside 2x encoders. So moving this to the mixer will still involve having to pass the target encoder to the mixer, but will add the additional complexity of having to pass a weighting map to the mixer and adding class-to-index translation boilerplate + weight setting for each mixer + """ # noqa + def __init__(self, is_target=False, target_weights=None, handle_unknown='unknown_token'): super().__init__(is_target) self._lang = None self.rev_map = {} @@ -22,8 +25,14 @@ def __init__(self, is_target=False, target_class_distribution=None, handle_unkno self.handle_unknown = handle_unknown if self.is_target: - self.target_class_distribution = target_class_distribution - self.index_weights = None + if self.handle_unknown != 'unknown_token': + raise ValueError(f'One Hot Encoders used for target encoding can only be used with `handle_unknown` \ + set to `unknown_token`. 
The option: "{self.handle_unknown}" is not supported!') + + self.target_weights = None + self.index_weights = None + if self.is_target: + self.target_weights = target_weights def prepare(self, priming_data, max_dimensions=20000): if self.is_prepared: @@ -35,7 +44,7 @@ def prepare(self, priming_data, max_dimensions=20000): self._lang.index2word = {} self._lang.word2index = {} self._lang.n_words = 0 - else: # self.handle_unknown == "unknown_token" + elif self.handle_unknown == "unknown_token": priming_data = [x if x is not None else UNCOMMON_WORD for x in priming_data] self._lang.index2word = {UNCOMMON_TOKEN: UNCOMMON_WORD} self._lang.word2index = {UNCOMMON_WORD: UNCOMMON_TOKEN} @@ -49,7 +58,7 @@ def prepare(self, priming_data, max_dimensions=20000): while self._lang.n_words > max_dimensions: if self.handle_unknown == "return_zeros": necessary_words = [] - else: # self.handle_unknown == "unknown_token" + elif self.handle_unknown == "unknown_token": necessary_words = [UNCOMMON_WORD] least_occuring_words = self._lang.getLeastOccurring(n=len(necessary_words) + 1) @@ -61,17 +70,17 @@ def prepare(self, priming_data, max_dimensions=20000): self._lang.removeWord(word_to_remove) + # Note: Is target assume that we are operating in "unknown_token" mode if self.is_target: - self.index_weights = [None] * self._lang.n_words - if self.target_class_distribution is not None: - self.index_weights[0] = np.mean(list(self.target_class_distribution.values())) - else: - self.index_weights[0] = 1 + self.index_weights = [1] * self._lang.n_words + if self.target_weights is not None: + uncommon_weight = np.min(list(self.target_weights.values())) + self.index_weights[0] = uncommon_weight + self.target_weights[UNCOMMON_WORD] = uncommon_weight for word in set(priming_data): - if self.target_class_distribution is not None: - self.index_weights[self._lang.word2index[str(word)]] = 1 / self.target_class_distribution[word] - else: - self.index_weights[self._lang.word2index[str(word)]] = 1 + if self.target_weights is not None: + self.index_weights[self._lang.word2index[str(word)]] = 1 / self.target_weights[word] + self.index_weights = torch.Tensor(self.index_weights) self.output_size = self._lang.n_words diff --git a/lightwood/encoder/image/helpers/img_to_vec.py b/lightwood/encoder/image/helpers/img_to_vec.py index 88f009bde..bbde255a2 100644 --- a/lightwood/encoder/image/helpers/img_to_vec.py +++ b/lightwood/encoder/image/helpers/img_to_vec.py @@ -6,6 +6,9 @@ class ChannelPoolAdaptiveAvg1d(torch.nn.AdaptiveAvgPool1d): + """ + Custom override of `torch.nn.AdaptiveAvgPool1d` to use `LightwoodAutocast()` and handle dimensions in the way we need to. + """ # noqa def forward(self, input): with LightwoodAutocast(): n, c, _, _ = input.size() @@ -17,10 +20,14 @@ def forward(self, input): class Img2Vec(nn.Module): + """ + Img2Vec is a ``torch.nn.module`` that returns image embeddings. + + For this, it uses a pretrained `torchvision.torch.resnext50_32x4d`, with its final fully connected layer removed. + + Output is a `self.output_size`-dimensioned vector, generated by taking the output of the Resnext's last convolutional layer and performing an adaptive channel pool average. 
+ """ # noqa def __init__(self): - """ Img2Vec - :param model: name of the model to use - """ super(Img2Vec, self).__init__() self.device, _ = get_devices() diff --git a/lightwood/encoder/image/img_2_vec.py b/lightwood/encoder/image/img_2_vec.py index 4e19469de..5f15f7fd7 100644 --- a/lightwood/encoder/image/img_2_vec.py +++ b/lightwood/encoder/image/img_2_vec.py @@ -9,12 +9,25 @@ class Img2VecEncoder(BaseEncoder): + """ + This encoder generates encoded representations for images using a pre-trained deep neural network. + + All input images are rescaled to a standard size of 224x224, and normalized using the mean and standard deviation of the ImageNet dataset (as it was used to train the underlying NN). + + Note that this encoder does not have a .decode() method yet. As such, models that predict images as output are not supported at this time. + + For more information about the neural network this encoder uses, refer to the `lightwood.encoder.image.helpers.img_to_vec.Img2Vec`. + """ # noqa + is_trainable_encoder: bool = True - def __init__(self, stop_after: int = 3600, is_target: bool = False): + def __init__(self, stop_after: float = 3600, is_target: bool = False): + """ + :param stop_after: time budget, in seconds. + :param is_target: whether the encoder corresponds to the target column. This is not currently possible for Img2VecEncoder. + """ # noqa + assert not is_target super().__init__(is_target) - # # I think we should make this an enum, something like: speed, balance, accuracy - # self.aim = aim self.is_prepared = False self._scaler = transforms.Resize((224, 224)) @@ -31,7 +44,10 @@ def __init__(self, stop_after: int = 3600, is_target: bool = False): pil_logger.setLevel(logging.ERROR) def prepare(self, train_priming_data: pd.Series, dev_priming_data: pd.Series): - # @TODO: Add a bit of training here (maybe? depending on time aim) + # @TODO: finetune here? depending on time aim + """ + Instances an `Img2Vec` object and sets the expected size for encoded representations. + """ if self.is_prepared: raise Exception('You can only call "prepare" once for a given encoder.') @@ -40,14 +56,22 @@ def prepare(self, train_priming_data: pd.Series, dev_priming_data: pd.Series): self.is_prepared = True def to(self, device, available_devices): + """ + Moves the model to-and-from CPU and GPU. + + :param device: will move the model to this device. + :param available_devices: all available devices as reported by lightwood. + + :return: same object but moved to the target device. + """ self.model.to(device, available_devices) return self def encode(self, images: List[str]) -> torch.Tensor: """ - Encode list of images + Creates encodings for a list of images; each image is referenced by a filepath or url. - :param images: list of images, each image is a path to a file or a url + :param images: list of images, each image is a path to a file or a url. 
:return: a torch.floatTensor """ if not self.is_prepared: diff --git a/lightwood/encoder/numeric/numeric.py b/lightwood/encoder/numeric/numeric.py index 1da38da3f..b6afb673f 100644 --- a/lightwood/encoder/numeric/numeric.py +++ b/lightwood/encoder/numeric/numeric.py @@ -1,14 +1,32 @@ import math +from typing import Iterable, List, Union import torch import numpy as np +from torch.types import Number from lightwood.encoder.base import BaseEncoder from lightwood.helpers.log import log from lightwood.helpers.general import is_none +from lightwood.api.dtype import dtype class NumericEncoder(BaseEncoder): + """ + The numeric encoder takes numbers (float or integer) and converts them into tensors of the form: + ``[0 if the number is none, otherwise 1, 1 if the number is negative, otherwise 0, natural_log(abs(number)), number/absolute_mean]`` - def __init__(self, data_type=None, is_target: bool = False, positive_domain: bool = False): + This representation is: ``[1 if the number is negative, otherwise 0, natural_log(abs(number)), number/absolute_mean]`` if encoding target values, since target values can't be none. + + The ``absolute_mean`` is computed in the ``prepare`` method and is just the mean of the absolute values of all numbers fed to prepare (which are not none) + + ``none`` stands for any number that is an actual python ``None`` value or any sort of non-numeric value (a string, nan, inf) + """ # noqa + + def __init__(self, data_type: dtype = None, is_target: bool = False, positive_domain: bool = False): + """ + :param data_type: The data type of the number (integer, float, quantity) + :param is_target: Indicates whether the encoder refers to a target column or feature column (True==target) + :param positive_domain: Forces the encoder to always output positive values + """ super().__init__(is_target) self._type = data_type self._abs_mean = None @@ -16,31 +34,32 @@ def __init__(self, data_type=None, is_target: bool = False, positive_domain: boo self.decode_log = False self.output_size = 4 if not self.is_target else 3 - def prepare(self, priming_data): + def prepare(self, priming_data: Iterable): + """ + ``NumericEncoder`` uses a rule-based preparation over the training (priming) data. The averages etc. are taken from this distribution. + + :param priming_data: an iterable data structure containing the numbers which will be used to compute the values used for normalizing the encoded representations + """ # noqa if self.is_prepared: raise Exception('You can only call "prepare" once for a given encoder.') value_type = 'int' for number in priming_data: - try: - number = float(number) - except Exception: - continue - - if np.isnan(number): - err = 'Lightwood does not support working with NaN values !' 
- log.warning(err) - continue - - if int(number) != number: - value_type = 'float' + if not is_none(number): + if int(number) != number: + value_type = 'float' self._type = value_type if self._type is None else self._type - non_null_priming_data = [float(str(x).replace(',', '.')) for x in priming_data if not is_none(x)] + non_null_priming_data = [x for x in priming_data if not is_none(x)] self._abs_mean = np.mean(np.abs(non_null_priming_data)) self.is_prepared = True - def encode(self, data): + def encode(self, data: Iterable): + """ + :param data: An iterable data structure containing the numbers to be encoded + + :returns: A torch tensor with the representations of each number + """ if not self.is_prepared: raise Exception('You need to call "prepare" before calling "encode" or "decode".') @@ -49,18 +68,13 @@ def encode(self, data): try: real = float(real) except Exception: - try: - real = float(real.replace(',', '.')) - except Exception: - real = None + real = None if self.is_target: + # Will crash if ``real`` is not a float, this is fine, targets should always have a value vector = [0] * 3 - if real is not None and self._abs_mean > 0: - vector[0] = 1 if real < 0 and not self.positive_domain else 0 - vector[1] = math.log(abs(real)) if abs(real) > 0 else -20 - vector[2] = real / self._abs_mean - else: - log.debug(f'Can\'t encode target value: {real}') + vector[0] = 1 if real < 0 and not self.positive_domain else 0 + vector[1] = math.log(abs(real)) if abs(real) > 0 else -20 + vector[2] = real / self._abs_mean else: vector = [0] * 4 @@ -80,7 +94,13 @@ def encode(self, data): return torch.Tensor(ret) - def decode(self, encoded_values, decode_log=None) -> list: + def decode(self, encoded_values: Union[List[Number], torch.Tensor], decode_log: bool = None) -> list: + """ + :param encoded_values: The encoded values to decode into single numbers + :param decode_log: Whether to decode the ``log`` or ``linear`` part of the representation, since the encoded vector contains both a log and a linear part + + :returns: The decoded number + """ # noqa if not self.is_prepared: raise Exception('You need to call "prepare" before calling "encode" or "decode".') diff --git a/lightwood/encoder/numeric/ts_array_numeric.py b/lightwood/encoder/numeric/ts_array_numeric.py index 49a457975..b61dae507 100644 --- a/lightwood/encoder/numeric/ts_array_numeric.py +++ b/lightwood/encoder/numeric/ts_array_numeric.py @@ -1,17 +1,23 @@ +from typing import List, Dict, Iterable, Optional + import torch import torch.nn.functional as F + from lightwood.encoder import BaseEncoder from lightwood.encoder.numeric import TsNumericEncoder class TsArrayNumericEncoder(BaseEncoder): - """ - Variant of vanilla numerical encoder, supports dynamic mean re-scaling - """ - def __init__(self, timesteps: int, is_target: bool = False, positive_domain: bool = False, grouped_by=None): + """ + This encoder handles arrays of numerical time series data by wrapping the numerical encoder with behavior specific to time series tasks. + + :param timesteps: length of forecasting horizon, as defined by TimeseriesSettings.window. + :param is_target: whether this encoder corresponds to the target column. + :param positive_domain: whether the column domain is expected to be positive numbers. + :param grouped_by: what columns, if any, are considered to group the original column and yield multiple time series. 
+ """ # noqa super(TsArrayNumericEncoder, self).__init__(is_target=is_target) - # time series normalization params self.normalizers = None self.group_combinations = None self.dependencies = grouped_by @@ -21,23 +27,49 @@ def __init__(self, timesteps: int, is_target: bool = False, positive_domain: boo self.output_size = self.data_window * self.sub_encoder.output_size def prepare(self, priming_data): + """ + This method prepares the underlying time series numerical encoder. + """ if self.is_prepared: raise Exception('You can only call "prepare" once for a given encoder.') self.sub_encoder.prepare(priming_data) self.is_prepared = True - def encode(self, data, dependency_data={}): + def encode(self, data: Iterable[Iterable], dependency_data: Optional[Dict[str, str]] = {}) -> torch.Tensor: """ - :param dependency_data: dict with grouped_by column info, to retrieve the correct normalizer for each datum - :return: tensor with shape (batch, NxK) where N: self.data_window and K: sub-encoder # of output features + Encodes a list of time series arrays using the underlying time series numerical encoder. + + :param data: list of numerical values to encode. Its length is determined by the tss.window parameter, and all data points belong to the same time series. + :param dependency_data: dict with values of each group_by column for the time series, used to retrieve the correct normalizer. + + :return: list of encoded time series arrays. Tensor is (len(data), N x K)-shaped, where N: self.data_window and K: sub-encoder # of output features. """ # noqa if not self.is_prepared: raise Exception('You need to call "prepare" before calling "encode" or "decode".') + if self.sub_encoder.normalizers is None and self.normalizers is not None: + self.sub_encoder.normalizers = self.normalizers if not dependency_data: dependency_data = {'__default': [None] * len(data)} ret = [] + for series in data: + ret.append(self.encode_one(series, dependency_data=dependency_data)) + + return torch.vstack(ret) + + def encode_one(self, data: Iterable, dependency_data: Optional[Dict[str, str]] = {}) -> torch.Tensor: + """ + Encodes a single windowed slice of any given time series. + + :param data: windowed slice of a numerical time series. + :param dependency_data: used to determine the correct normalizer for the input. + + :return: an encoded time series array, as per the underlying `TsNumericEncoder` object. + The output of this encoder for all time steps is concatenated, so the final shape of the tensor is (1, NxK), where N: self.data_window and K: sub-encoder # of output features. + """ # noqa + ret = [] + for data_point in data: ret.append(self.sub_encoder.encode([data_point], dependency_data=dependency_data)) @@ -49,7 +81,15 @@ def encode(self, data, dependency_data={}): return ret - def decode(self, encoded_values, dependency_data=None, return_all=False): + def decode(self, encoded_values, dependency_data=None) -> List[List]: + """ + Decodes a list of encoded arrays into values in their original domains. + + :param encoded_values: encoded slices of numerical time series. + :param dependency_data: used to determine the correct normalizer for the input. + + :return: a list of decoded time series arrays. 
+ """ if not self.is_prepared: raise Exception('You need to call "prepare" before calling "encode" or "decode".') @@ -58,7 +98,21 @@ def decode(self, encoded_values, dependency_data=None, return_all=False): self.sub_encoder.output_size) ret = [] - for encoded_timestep in torch.split(encoded_values, 1, dim=1): - ret.extend(self.sub_encoder.decode(encoded_timestep.squeeze(1), dependency_data=dependency_data)) + for tensor in torch.split(encoded_values, 1, dim=0): + ret.append(self.decode_one(tensor, dependency_data=dependency_data)) + + return ret + def decode_one(self, encoded_value, dependency_data={}) -> List: + """ + Decodes a single window of a time series into its original domain. + + :param encoded_value: encoded slice of a numerical time series. + :param dependency_data: used to determine the correct normalizer for the input. + + :return: a list of length TimeseriesSettings.window with decoded values for the forecasted time series. + """ + ret = [] + for encoded_timestep in torch.split(encoded_value, 1, dim=1): + ret.extend(self.sub_encoder.decode(encoded_timestep.squeeze(1), dependency_data=dependency_data)) return ret diff --git a/lightwood/encoder/numeric/ts_numeric.py b/lightwood/encoder/numeric/ts_numeric.py index 01420c699..bdbc7feec 100644 --- a/lightwood/encoder/numeric/ts_numeric.py +++ b/lightwood/encoder/numeric/ts_numeric.py @@ -2,6 +2,7 @@ import torch import numpy as np from lightwood.encoder.numeric import NumericEncoder +from lightwood.helpers.general import is_none from lightwood.helpers.log import log @@ -48,16 +49,18 @@ def encode(self, data, dependency_data={}): else: mean = self._abs_mean - if real is not None: + if not is_none(real): vector[0] = 1 if real < 0 and not self.positive_domain else 0 vector[1] = real / mean if mean != 0 else real else: - raise Exception(f'Can\'t encode target value: {real}') + pass + # This should raise an exception *once* we fix the TsEncoder such that this doesn't get feed `nan` + # raise Exception(f'Can\'t encode target value: {real}') else: vector = [0] * 3 try: - if real is not None: + if not is_none(real): vector[0] = 1 vector[1] = 1 if real < 0 and not self.positive_domain else 0 vector[2] = real / self._abs_mean diff --git a/lightwood/encoder/text/pretrained.py b/lightwood/encoder/text/pretrained.py index 07047ffe2..d90438bdc 100644 --- a/lightwood/encoder/text/pretrained.py +++ b/lightwood/encoder/text/pretrained.py @@ -95,7 +95,7 @@ class PretrainedLangEncoder(BaseEncoder): def __init__( self, - stop_after: int, + stop_after: float, is_target=False, model_name="distilbert", custom_tokenizer=None, diff --git a/lightwood/encoder/time_series/rnn.py b/lightwood/encoder/time_series/rnn.py index 39ce02002..2e28b6cf2 100644 --- a/lightwood/encoder/time_series/rnn.py +++ b/lightwood/encoder/time_series/rnn.py @@ -30,7 +30,7 @@ class TimeSeriesEncoder(BaseEncoder): is_timeseries_encoder: bool = True is_trainable_encoder: bool = True - def __init__(self, stop_after: int, is_target=False, original_type: str = None, target: str = None, + def __init__(self, stop_after: float, is_target=False, original_type: str = None, target: str = None, grouped_by: List[str] = [], encoder_type='rnn'): super().__init__(is_target) self.device, _ = get_devices() diff --git a/lightwood/encoder/type_encoder_maps/Array.py b/lightwood/encoder/type_encoder_maps/Array.py deleted file mode 100644 index cb65e3963..000000000 --- a/lightwood/encoder/type_encoder_maps/Array.py +++ /dev/null @@ -1,4 +0,0 @@ -from lightwood.encoder.array.array import ArrayEncoder - - 
-__all__ = ['ArrayEncoder'] diff --git a/lightwood/encoder/type_encoder_maps/Audio.py b/lightwood/encoder/type_encoder_maps/Audio.py deleted file mode 100644 index 7e440d36b..000000000 --- a/lightwood/encoder/type_encoder_maps/Audio.py +++ /dev/null @@ -1,5 +0,0 @@ -from lightwood.encoder.audio import MFCCEncoder - - -__all__ = ['MFCCEncoder'] - diff --git a/lightwood/encoder/type_encoder_maps/Binary.py b/lightwood/encoder/type_encoder_maps/Binary.py deleted file mode 100644 index d1d007435..000000000 --- a/lightwood/encoder/type_encoder_maps/Binary.py +++ /dev/null @@ -1,7 +0,0 @@ -from lightwood.encoder.categorical.onehot import OneHotEncoder -from lightwood.encoder.categorical.binary import BinaryEncoder -from lightwood.encoder.time_series.rnn import TimeSeriesEncoder -from lightwood.encoder.array.array import ArrayEncoder -from lightwood.encoder.identity.identity import IdentityEncoder - -__all__ = ['BinaryEncoder', 'OneHotEncoder', 'TimeSeriesEncoder', 'ArrayEncoder', 'IdentityEncoder'] diff --git a/lightwood/encoder/type_encoder_maps/Categorical.py b/lightwood/encoder/type_encoder_maps/Categorical.py deleted file mode 100644 index 079767a0b..000000000 --- a/lightwood/encoder/type_encoder_maps/Categorical.py +++ /dev/null @@ -1,7 +0,0 @@ -from lightwood.encoder.categorical.onehot import OneHotEncoder -from lightwood.encoder.categorical.autoencoder import CategoricalAutoEncoder -from lightwood.encoder.time_series.rnn import TimeSeriesEncoder -from lightwood.encoder.array.array import ArrayEncoder - - -__all__ = ['OneHotEncoder', 'CategoricalAutoEncoder', 'TimeSeriesEncoder', 'ArrayEncoder'] diff --git a/lightwood/encoder/type_encoder_maps/Date.py b/lightwood/encoder/type_encoder_maps/Date.py deleted file mode 100644 index fe663de22..000000000 --- a/lightwood/encoder/type_encoder_maps/Date.py +++ /dev/null @@ -1,6 +0,0 @@ -from lightwood.encoder.datetime.datetime import DatetimeEncoder -from lightwood.encoder.time_series.rnn import TimeSeriesEncoder -from lightwood.encoder.array.array import ArrayEncoder - - -__all__ = ['DatetimeEncoder', 'TimeSeriesEncoder', 'ArrayEncoder'] diff --git a/lightwood/encoder/type_encoder_maps/Datetime.py b/lightwood/encoder/type_encoder_maps/Datetime.py deleted file mode 100644 index fe663de22..000000000 --- a/lightwood/encoder/type_encoder_maps/Datetime.py +++ /dev/null @@ -1,6 +0,0 @@ -from lightwood.encoder.datetime.datetime import DatetimeEncoder -from lightwood.encoder.time_series.rnn import TimeSeriesEncoder -from lightwood.encoder.array.array import ArrayEncoder - - -__all__ = ['DatetimeEncoder', 'TimeSeriesEncoder', 'ArrayEncoder'] diff --git a/lightwood/encoder/type_encoder_maps/Float.py b/lightwood/encoder/type_encoder_maps/Float.py deleted file mode 100644 index dbbc5bff6..000000000 --- a/lightwood/encoder/type_encoder_maps/Float.py +++ /dev/null @@ -1,7 +0,0 @@ -from lightwood.encoder.numeric.numeric import NumericEncoder -from lightwood.encoder.numeric.ts_numeric import TsNumericEncoder -from lightwood.encoder.time_series.rnn import TimeSeriesEncoder -from lightwood.encoder.array.array import ArrayEncoder -from lightwood.encoder.identity.identity import IdentityEncoder - -__all__ = ['NumericEncoder', 'TsNumericEncoder', 'TimeSeriesEncoder', 'ArrayEncoder', 'IdentityEncoder'] diff --git a/lightwood/encoder/type_encoder_maps/Image.py b/lightwood/encoder/type_encoder_maps/Image.py deleted file mode 100644 index 9fe5fd448..000000000 --- a/lightwood/encoder/type_encoder_maps/Image.py +++ /dev/null @@ -1,4 +0,0 @@ -from 
lightwood.encoder.image.img_2_vec import Img2VecEncoder - - -__all__ = ['Img2VecEncoder'] diff --git a/lightwood/encoder/type_encoder_maps/Integer.py b/lightwood/encoder/type_encoder_maps/Integer.py deleted file mode 100644 index dbbc5bff6..000000000 --- a/lightwood/encoder/type_encoder_maps/Integer.py +++ /dev/null @@ -1,7 +0,0 @@ -from lightwood.encoder.numeric.numeric import NumericEncoder -from lightwood.encoder.numeric.ts_numeric import TsNumericEncoder -from lightwood.encoder.time_series.rnn import TimeSeriesEncoder -from lightwood.encoder.array.array import ArrayEncoder -from lightwood.encoder.identity.identity import IdentityEncoder - -__all__ = ['NumericEncoder', 'TsNumericEncoder', 'TimeSeriesEncoder', 'ArrayEncoder', 'IdentityEncoder'] diff --git a/lightwood/encoder/type_encoder_maps/Quantity.py b/lightwood/encoder/type_encoder_maps/Quantity.py deleted file mode 100644 index dbbc5bff6..000000000 --- a/lightwood/encoder/type_encoder_maps/Quantity.py +++ /dev/null @@ -1,7 +0,0 @@ -from lightwood.encoder.numeric.numeric import NumericEncoder -from lightwood.encoder.numeric.ts_numeric import TsNumericEncoder -from lightwood.encoder.time_series.rnn import TimeSeriesEncoder -from lightwood.encoder.array.array import ArrayEncoder -from lightwood.encoder.identity.identity import IdentityEncoder - -__all__ = ['NumericEncoder', 'TsNumericEncoder', 'TimeSeriesEncoder', 'ArrayEncoder', 'IdentityEncoder'] diff --git a/lightwood/encoder/type_encoder_maps/Rich_Text.py b/lightwood/encoder/type_encoder_maps/Rich_Text.py deleted file mode 100644 index f92de6015..000000000 --- a/lightwood/encoder/type_encoder_maps/Rich_Text.py +++ /dev/null @@ -1,8 +0,0 @@ -from lightwood.encoder.text.short import ShortTextEncoder -from lightwood.encoder.text.vocab import VocabularyEncoder -from lightwood.encoder.text.rnn import RnnEncoder as TextRnnEncoder -from lightwood.encoder.categorical.autoencoder import CategoricalAutoEncoder -from lightwood.encoder.text.pretrained import PretrainedLangEncoder - - -__all__ = ['ShortTextEncoder', 'VocabularyEncoder', 'TextRnnEncoder', 'CategoricalAutoEncoder', 'PretrainedLangEncoder'] diff --git a/lightwood/encoder/type_encoder_maps/Short_Text.py b/lightwood/encoder/type_encoder_maps/Short_Text.py deleted file mode 100644 index f92de6015..000000000 --- a/lightwood/encoder/type_encoder_maps/Short_Text.py +++ /dev/null @@ -1,8 +0,0 @@ -from lightwood.encoder.text.short import ShortTextEncoder -from lightwood.encoder.text.vocab import VocabularyEncoder -from lightwood.encoder.text.rnn import RnnEncoder as TextRnnEncoder -from lightwood.encoder.categorical.autoencoder import CategoricalAutoEncoder -from lightwood.encoder.text.pretrained import PretrainedLangEncoder - - -__all__ = ['ShortTextEncoder', 'VocabularyEncoder', 'TextRnnEncoder', 'CategoricalAutoEncoder', 'PretrainedLangEncoder'] diff --git a/lightwood/encoder/type_encoder_maps/Tags.py b/lightwood/encoder/type_encoder_maps/Tags.py deleted file mode 100644 index 6b32439eb..000000000 --- a/lightwood/encoder/type_encoder_maps/Tags.py +++ /dev/null @@ -1,7 +0,0 @@ -from lightwood.encoder.categorical.multihot import MultiHotEncoder -from lightwood.encoder.text.pretrained import PretrainedLangEncoder -from lightwood.encoder.time_series.rnn import TimeSeriesEncoder -from lightwood.encoder.array.array import ArrayEncoder - - -__all__ = ['MultiHotEncoder', 'PretrainedLangEncoder', 'TimeSeriesEncoder', 'ArrayEncoder'] diff --git a/lightwood/encoder/type_encoder_maps/TimeSeries.py 
b/lightwood/encoder/type_encoder_maps/TimeSeries.py deleted file mode 100644 index 2c8a7338e..000000000 --- a/lightwood/encoder/type_encoder_maps/TimeSeries.py +++ /dev/null @@ -1,5 +0,0 @@ -from lightwood.encoder.numeric.ts_array_numeric import TsArrayNumericEncoder -from lightwood.encoder.time_series.rnn import TimeSeriesEncoder - - -__all__ = ['TsArrayNumericEncoder', 'TimeSeriesEncoder'] diff --git a/lightwood/encoder/type_encoder_maps/__init__.py b/lightwood/encoder/type_encoder_maps/__init__.py deleted file mode 100644 index e69de29bb..000000000 diff --git a/lightwood/helpers/accuracy.py b/lightwood/helpers/accuracy.py new file mode 100644 index 000000000..74f652462 --- /dev/null +++ b/lightwood/helpers/accuracy.py @@ -0,0 +1,14 @@ +from sklearn.metrics import r2_score as sk_r2_score + + +def r2_score(y_true, y_pred) -> float: + """ Wrapper for sklearn R2 score, lower capped between 0 and 1""" + acc = sk_r2_score(y_true, y_pred) + # Cap at 0 + if acc < 0: + acc = 0 + # Guard against overflow (> 1 means overflow of negative score) + if acc > 1: + acc = 0 + + return acc diff --git a/lightwood/helpers/general.py b/lightwood/helpers/general.py index b60681a0f..dcca1d3a5 100644 --- a/lightwood/helpers/general.py +++ b/lightwood/helpers/general.py @@ -65,7 +65,11 @@ def evaluate_accuracy(data: pd.DataFrame, ts_analysis=ts_analysis) else: true_values = data[target].tolist() - accuracy_function = getattr(importlib.import_module('sklearn.metrics'), accuracy_function_str) + if hasattr(importlib.import_module('lightwood.helpers.accuracy'), accuracy_function_str): + accuracy_function = getattr(importlib.import_module('lightwood.helpers.accuracy'), + accuracy_function_str) + else: + accuracy_function = getattr(importlib.import_module('sklearn.metrics'), accuracy_function_str) score_dict[accuracy_function_str] = accuracy_function(list(true_values), list(predictions)) return score_dict diff --git a/lightwood/helpers/text.py b/lightwood/helpers/text.py index 992db70d5..f933f8582 100644 --- a/lightwood/helpers/text.py +++ b/lightwood/helpers/text.py @@ -13,6 +13,7 @@ import json import re import hashlib +from typing import Iterable import numpy as np import scipy.stats as st import langdetect @@ -208,9 +209,14 @@ def get_identifier_description_mp(arg_tup): return get_identifier_description(data, column_name, data_dtype) -def get_identifier_description(data, column_name, data_dtype): +def get_identifier_description(data: Iterable, column_name: str, data_dtype: dtype): data = list(data) - unquie_pct = len(set(data)) / len(data) + nr_unique = len(set(data)) + + if nr_unique == 1: + return 'No Information' + + unique_pct = nr_unique / len(data) spaces = [len(str(x).split(' ')) - 1 for x in data] mean_spaces = np.mean(spaces) @@ -218,7 +224,7 @@ def get_identifier_description(data, column_name, data_dtype): # Detect auto incrementing index # -- some cases where I guess people do want to use this for learning, so ignoring this check for now... 
# if data_dtype == dtype.integer: - # if get_pct_auto_increment(data) > 0.98 and unquie_pct > 0.99: + # if get_pct_auto_increment(data) > 0.98 and unique_pct > 0.99: # return 'Auto-incrementing identifier' # Detect hash @@ -244,7 +250,7 @@ def get_identifier_description(data, column_name, data_dtype): return 'Foreign key' if _is_identifier_name(column_name) or data_dtype in (dtype.categorical, dtype.binary): - if unquie_pct > 0.98: + if unique_pct > 0.98: if is_uuid: return 'UUID' else: @@ -252,7 +258,7 @@ def get_identifier_description(data, column_name, data_dtype): # Everything is unique and it's too short to be rich text if data_dtype in (dtype.categorical, dtype.short_text, dtype.rich_text) and \ - unquie_pct > 0.99999 and mean_spaces < 1: + unique_pct > 0.99999 and mean_spaces < 1: return 'Unknown identifier' return None diff --git a/lightwood/mixer/base.py b/lightwood/mixer/base.py index dcea431c2..affa9ab02 100644 --- a/lightwood/mixer/base.py +++ b/lightwood/mixer/base.py @@ -26,10 +26,8 @@ class BaseMixer: fit_data_len: int # @TODO (Patricio): should this really be in `BaseMixer`? supports_proba: bool - def __init__(self, stop_after: int): + def __init__(self, stop_after: float): """ - Initializer a mixer. - :param stop_after: Time budget to train this mixer. """ self.stop_after = stop_after diff --git a/lightwood/mixer/lightgbm.py b/lightwood/mixer/lightgbm.py index 97caf0a54..d14aa89f3 100644 --- a/lightwood/mixer/lightgbm.py +++ b/lightwood/mixer/lightgbm.py @@ -1,6 +1,5 @@ import time from typing import Dict, List, Set - import torch import optuna import lightgbm @@ -10,6 +9,7 @@ import optuna.integration.lightgbm as optuna_lightgbm from lightwood.api import dtype +from lightwood.encoder.base import BaseEncoder from lightwood.helpers.log import log from lightwood.mixer.base import BaseMixer from lightwood.helpers.device import get_devices @@ -47,10 +47,31 @@ class LightGBM(BaseMixer): use_optuna: bool supports_proba: bool + """ + Gradient boosting mixer with a LightGBM backbone. + + This mixer is a good all-rounder, due to the generally great performance of tree-based ML algorithms for supervised learning tasks with tabular data. + If you want more information regarding the techniques that set apart LightGBM from other gradient boosters, please refer to their technical paper: "LightGBM: A Highly Efficient Gradient Boosting Decision Tree" (2017). + + We can basically think of this mixer as a wrapper around the LightGBM interface. There are a few caveats the user may want to be aware of: + * If you seek GPU utilization, LightGBM must be compiled from source instead of being installed through `pip`. + * Integer, float, and quantity `dtype`s are treated as regression tasks with `L2` loss. All other supported `dtype`s are cast as a multiclass task with `multi_logloss` loss. + * It has an automatic optuna-based hyperparameter search. This procedure triggers when a single iteration of LightGBM is deemed fast enough (given the time budget). + * A partial fit can be performed with the `dev` data split as part of `fit`, if specified with the `fit_on_dev` argument. + """ # noqa + def __init__( + self, stop_after: float, target: str, dtype_dict: Dict[str, str], input_cols: List[str], - fit_on_dev: bool, use_optuna: bool = True): + fit_on_dev: bool, use_optuna: bool, target_encoder: BaseEncoder): + """ + :param stop_after: time budget in seconds.
+ :param target: name of the target column that the mixer will learn to predict. + :param dtype_dict: dictionary with dtypes of all columns in the data. + :param input_cols: list of column names. + :param fit_on_dev: whether to perform a `partial_fit()` at the end of `fit()` using the `dev` data split. + :param use_optuna: whether to activate the automated hyperparameter search (optuna-based). Note that setting this flag to `True` does not guarantee the search will run, rather, the speed criteria will be checked first (i.e., if a single iteration is too slow with respect to the time budget, the search will not take place). + """ # noqa super().__init__(stop_after) self.model = None self.ordinal_encoder = None @@ -64,6 +85,7 @@ def __init__( self.fit_on_dev = fit_on_dev self.supports_proba = dtype_dict[target] in [dtype.binary, dtype.categorical] self.stable = True + self.target_encoder = target_encoder # GPU Only available via --install-option=--gpu with opencl-dev and libboost dev (a bunch of them) installed, so let's turn this off for now and we can put it behind some flag later # noqa gpu_works = check_gpu_support() @@ -77,7 +99,16 @@ def __init__( self.max_bin = 255 - def _to_dataset(self, data, output_dtype): + def _to_dataset(self, data: Dict[str, Dict], output_dtype: str): + """ + Helper method to wrangle data into the format that the underlying model requires. + + :param data: Includes train and dev data datasources. + :param output_dtype + :return: modified `data` object that conforms to LightGBM's expected format. + """ + weight_map = getattr(self.target_encoder, 'target_weights', None) + for subset_name in data.keys(): for input_col in self.input_cols: if data[subset_name]['data'] is None: @@ -87,10 +118,11 @@ def _to_dataset(self, data, output_dtype): enc_col = data[subset_name]['ds'].get_encoded_column_data(input_col) data[subset_name]['data'] = torch.cat((data[subset_name]['data'], enc_col.to(self.device)), 1) - data[subset_name]['data'] = data[subset_name]['data'].numpy() + data[subset_name]['data'] = data[subset_name]['data'].cpu().numpy() label_data = data[subset_name]['ds'].get_column_original_data(self.target) + data[subset_name]['weights'] = None if output_dtype in (dtype.categorical, dtype.binary): if subset_name == 'train': self.ordinal_encoder = OrdinalEncoder() @@ -99,6 +131,8 @@ def _to_dataset(self, data, output_dtype): self.ordinal_encoder.fit(np.array(list(self.label_set)).reshape(-1, 1)) label_data = [x if x in self.label_set else '__mdb_unknown_cat' for x in label_data] + if weight_map is not None: + data[subset_name]['weights'] = [weight_map[x] for x in label_data] label_data = self.ordinal_encoder.transform(np.array(label_data).reshape(-1, 1)).flatten() elif output_dtype == dtype.integer: label_data = label_data.clip(-pow(2, 63), pow(2, 63)).astype(int) @@ -110,6 +144,12 @@ def _to_dataset(self, data, output_dtype): return data def fit(self, train_data: EncodedDs, dev_data: EncodedDs) -> None: + """ + Fits the LightGBM model. 
+ + :param train_data: encoded features for training dataset + :param dev_data: encoded features for dev dataset + """ log.info('Started fitting LGBM model') data = { 'train': {'ds': train_data, 'data': None, 'label_data': {}}, @@ -148,10 +188,22 @@ def fit(self, train_data: EncodedDs, dev_data: EncodedDs) -> None: # Determine time per iterations start = time.time() self.params['num_iterations'] = 1 - self.model = lightgbm.train(self.params, lightgbm.Dataset( - data['train']['data'], - label=data['train']['label_data']), - verbose_eval=False) + ''' + Why construct a dataset here instead of using the training dataset? + + Because it guards against the following crash: + + WARNING:lightwood-1613058:Exception: Cannot change feature_pre_filter after constructed Dataset handle. when training mixer: + feature_fraction, val_score: inf: 0%| | 0/7 [00:00 None: else: model_generator = lightgbm - # Prepare the data - train_dataset = lightgbm.Dataset(data['train']['data'], label=data['train']['label_data']) - dev_dataset = lightgbm.Dataset(data['dev']['data'], label=data['dev']['label_data']) - # Train the models log.info( f'Training GBM ({model_generator}) with {self.num_iterations} iterations given {self.stop_after} seconds constraint') # noqa @@ -182,6 +230,13 @@ def fit(self, train_data: EncodedDs, dev_data: EncodedDs) -> None: self.params['early_stopping_rounds'] = 5 + # Prepare the data + train_dataset = lightgbm.Dataset(data['train']['data'], label=data['train']['label_data'], + weight=data['train']['weights']) + + dev_dataset = lightgbm.Dataset(data['dev']['data'], label=data['dev']['label_data'], + weight=data['dev']['weights']) + self.model = model_generator.train( self.params, train_dataset, valid_sets=[dev_dataset, train_dataset], valid_names=['dev', 'train'], @@ -193,6 +248,12 @@ def fit(self, train_data: EncodedDs, dev_data: EncodedDs) -> None: self.partial_fit(dev_data, train_data) def partial_fit(self, train_data: EncodedDs, dev_data: EncodedDs) -> None: + """ + Updates the LightGBM model. + + :param train_data: encoded features for (new) training dataset + :param dev_data: encoded features for (new) dev dataset + """ pct_of_original = len(train_data) / self.fit_data_len iterations = max(1, int(self.num_iterations * pct_of_original) / 2) @@ -202,8 +263,10 @@ def partial_fit(self, train_data: EncodedDs, dev_data: EncodedDs) -> None: output_dtype = self.dtype_dict[self.target] data = self._to_dataset(data, output_dtype) - train_dataset = lightgbm.Dataset(data['retrain']['data'], label=data['retrain']['label_data']) - dev_dataset = lightgbm.Dataset(data['dev']['data'], label=data['dev']['label_data']) + train_dataset = lightgbm.Dataset(data['retrain']['data'], label=data['retrain']['label_data'], + weight=data['retrain']['weights']) + dev_dataset = lightgbm.Dataset(data['dev']['data'], label=data['dev']['label_data'], + weight=data['dev']['weights']) log.info(f'Updating lightgbm model with {iterations} iterations') if iterations < 1: @@ -217,6 +280,14 @@ def partial_fit(self, train_data: EncodedDs, dev_data: EncodedDs) -> None: def __call__(self, ds: EncodedDs, args: PredictionArguments = PredictionArguments()) -> pd.DataFrame: + """ + Call a trained LightGBM mixer to output predictions for the target column. + + :param ds: input data with values for all non-target columns. + :param args: inference-time arguments (e.g. whether to output predicted labels or probabilities). + + :return: dataframe with predictions.
+ """ data = None for input_col in self.input_cols: if data is None: @@ -224,7 +295,7 @@ def __call__(self, ds: EncodedDs, else: data = torch.cat((data, ds.get_encoded_column_data(input_col).to(self.device)), 1) - data = data.numpy() + data = data.cpu().numpy() raw_predictions = self.model.predict(data) if self.ordinal_encoder is not None: diff --git a/lightwood/mixer/lightgbm_array.py b/lightwood/mixer/lightgbm_array.py index 36aedd016..d6385cd96 100644 --- a/lightwood/mixer/lightgbm_array.py +++ b/lightwood/mixer/lightgbm_array.py @@ -3,6 +3,7 @@ from typing import Dict, List, Union from lightwood.api import dtype +from lightwood.encoder.base import BaseEncoder from lightwood.helpers.log import log from lightwood.mixer.base import BaseMixer from lightwood.mixer.lightgbm import LightGBM @@ -19,14 +20,15 @@ class LightGBMArray(BaseMixer): supports_proba: bool def __init__( - self, stop_after: int, target: str, dtype_dict: Dict[str, str], + self, stop_after: float, target: str, dtype_dict: Dict[str, str], input_cols: List[str], - n_ts_predictions: int, fit_on_dev: bool): + n_ts_predictions: int, fit_on_dev: bool, target_encoder: BaseEncoder): super().__init__(stop_after) self.submodel_stop_after = stop_after / n_ts_predictions self.target = target dtype_dict[target] = dtype.float - self.models = [LightGBM(self.submodel_stop_after, target, dtype_dict, input_cols, fit_on_dev, use_optuna=False) + self.models = [LightGBM(self.submodel_stop_after, target, dtype_dict, input_cols, fit_on_dev, + False, target_encoder) for _ in range(n_ts_predictions)] self.n_ts_predictions = n_ts_predictions # for time series tasks, how long is the forecast horizon self.supports_proba = False diff --git a/lightwood/mixer/neural.py b/lightwood/mixer/neural.py index 02a2065d4..062b0ee61 100644 --- a/lightwood/mixer/neural.py +++ b/lightwood/mixer/neural.py @@ -1,6 +1,6 @@ import time from copy import deepcopy -from typing import Dict, List +from typing import Dict, List, Optional import torch import numpy as np @@ -34,9 +34,9 @@ class Neural(BaseMixer): supports_proba: bool def __init__( - self, stop_after: int, target: str, dtype_dict: Dict[str, str], + self, stop_after: float, target: str, dtype_dict: Dict[str, str], timeseries_settings: TimeseriesSettings, target_encoder: BaseEncoder, net: str, fit_on_dev: bool, - search_hyperparameters: bool): + search_hyperparameters: bool, n_epochs: Optional[int] = None): """ The Neural mixer trains a fully connected dense network from concatenated encoded outputs of each of the features in the dataset to predicted the encoded output. @@ -48,6 +48,7 @@ def __init__( :param net: The network type to use (`DeafultNet` or `ArNet`) :param fit_on_dev: If we should fit on the dev dataset :param search_hyperparameters: If the network should run a more through hyperparameter search (currently disabled) + :param n_epochs: amount of epochs that the network will be trained for. Supersedes all other early stopping criteria if specified. 
""" # noqa super().__init__(stop_after) self.dtype_dict = dtype_dict @@ -55,6 +56,7 @@ def __init__( self.timeseries_settings = timeseries_settings self.target_encoder = target_encoder self.epochs_to_best = 0 + self.n_epochs = n_epochs self.fit_on_dev = fit_on_dev self.net_class = DefaultNet if net == 'DefaultNet' else ArNet self.supports_proba = dtype_dict[target] in [dtype.binary, dtype.categorical] @@ -207,15 +209,22 @@ def _max_fit(self, train_dl, dev_dl, criterion, optimizer, scaler, stop_after, r best_model = deepcopy(self.model) epochs_to_best = epoch - if len(running_errors) >= 5: - delta_mean = np.average([running_errors[-i - 1] - running_errors[-i] for i in range(1, 5)], - weights=[(1 / 2)**i for i in range(1, 5)]) - if delta_mean <= 0: + # manually set epoch limit + if self.n_epochs is not None: + if epoch > self.n_epochs: + break + + # automated early stopping + else: + if len(running_errors) >= 5: + delta_mean = np.average([running_errors[-i - 1] - running_errors[-i] for i in range(1, 5)], + weights=[(1 / 2)**i for i in range(1, 5)]) + if delta_mean <= 0: + break + elif (time.time() - started) > stop_after: + break + elif running_errors[-1] < 0.0001 or train_error < 0.0001: break - elif (time.time() - started) > stop_after: - break - elif running_errors[-1] < 0.0001 or train_error < 0.0001: - break if np.isnan(best_dev_error): best_dev_error = pow(2, 32) @@ -250,7 +259,7 @@ def fit(self, train_data: EncodedDs, dev_data: EncodedDs) -> None: """ Fits the Neural mixer on some data, making it ready to predit - :param train_data: The EncodedDs on which to train the network + :param train_data: The network is fit/trained on this :param dev_data: Data used for early stopping and hyperparameter determination """ # ConcatedEncodedDs @@ -280,13 +289,15 @@ def fit(self, train_data: EncodedDs, dev_data: EncodedDs) -> None: if self.fit_on_dev: self.partial_fit(dev_data, train_data) - self._final_tuning(dev_data) + + if not self.timeseries_settings.is_timeseries: + self._final_tuning(dev_data) def partial_fit(self, train_data: EncodedDs, dev_data: EncodedDs) -> None: """ Augments the mixer's fit with new data, nr of epochs is based on the amount of epochs the original fitting took - :param train_data: The EncodedDs on which to train the network + :param train_data: The network is fit/trained on this :param dev_data: Data used for early stopping and hyperparameter determination """ @@ -303,10 +314,10 @@ def partial_fit(self, train_data: EncodedDs, dev_data: EncodedDs) -> None: def __call__(self, ds: EncodedDs, args: PredictionArguments = PredictionArguments()) -> pd.DataFrame: """ - Make predictions based on datasource similar to the one used to fit (sans the target column) + Make predictions based on datasource with the same features as the ones used for fitting - :param ds: The EncodedDs for which to generate the predictions - :param arg: Argument for predicting + :param ds: Predictions are generate from it + :param arg: Any additional arguments used in predicting :returns: A dataframe cotaining the decoded predictions and (depending on the args) additional information such as the probabilites for each target class """ # noqa @@ -332,10 +343,7 @@ def __call__(self, ds: EncodedDs, else: decoded_prediction = self.target_encoder.decode(Yh, **kwargs) - if not self.timeseries_settings.is_timeseries or self.timeseries_settings.nr_predictions == 1: - decoded_predictions.extend(decoded_prediction) - else: - decoded_predictions.append(decoded_prediction) + 
decoded_predictions.extend(decoded_prediction) ydf = pd.DataFrame({'prediction': decoded_predictions}) diff --git a/lightwood/mixer/qclassic.py b/lightwood/mixer/qclassic.py index 760989f60..e9aa031cb 100644 --- a/lightwood/mixer/qclassic.py +++ b/lightwood/mixer/qclassic.py @@ -9,7 +9,7 @@ class QClassic(Neural): # wrapper class to be combined with Neural class when performance stabilizes def __init__( - self, stop_after: int, target: str, dtype_dict: Dict[str, str], + self, stop_after: float, target: str, dtype_dict: Dict[str, str], input_cols: List[str], timeseries_settings: TimeseriesSettings, target_encoder: BaseEncoder, net: str, fit_on_dev: bool, search_hyperparameters: bool): diff --git a/lightwood/mixer/regression.py b/lightwood/mixer/regression.py index fae39bcc6..fa3c383ad 100644 --- a/lightwood/mixer/regression.py +++ b/lightwood/mixer/regression.py @@ -12,11 +12,27 @@ class Regression(BaseMixer): + """ + The `Regression` mixer wraps scikit-learn's `LinearRegression` class + (https://scikit-learn.org/stable/modules/generated/sklearn.linear_model.LinearRegression.html) + + This class performs Ordinary Least-squares Regression (OLS) under the hood; + this means it fits a set of coefficients (w_1, w_2, ..., w_N) for an N-length feature vector that minimize the difference + between the predicted target value and the observed true value. + + This mixer takes featurized (encoded) data as input to predict the target. It is used when the target data type is considered numerical (integer, float, or quantity). + """ # noqa model: LinearRegression label_map: dict supports_proba: bool - def __init__(self, stop_after: int, target_encoder: BaseEncoder, dtype_dict: dict, target: str): + def __init__(self, stop_after: float, target_encoder: BaseEncoder, dtype_dict: dict, target: str): + """ + :param stop_after: Maximum amount of seconds it should fit for, currently ignored + :param target_encoder: The encoder which will be used to decode the target + :param dtype_dict: A map of feature names and their data types + :param target: Name of the target column + """ # noqa super().__init__(stop_after) self.target_encoder = target_encoder self.target_dtype = dtype_dict[target] @@ -25,6 +41,12 @@ def __init__(self, stop_after: int, target_encoder: BaseEncoder, dtype_dict: dic self.stable = False def fit(self, train_data: EncodedDs, dev_data: EncodedDs) -> None: + """ + Fits a `LinearRegression` model on the input feature data to provide predictions.
+ + :param train_data: Regression is fit on this + :param dev_data: This just gets concatenated to the ``train_data`` + """ if self.target_dtype not in (dtype.float, dtype.integer, dtype.quantity): raise Exception(f'Unspported {self.target_dtype} type for regression') log.info('Fitting Linear Regression model') @@ -41,10 +63,24 @@ def fit(self, train_data: EncodedDs, dev_data: EncodedDs) -> None: log.info(f'Regression based correlation of: {self.model.score(X, Y)}') def partial_fit(self, train_data: EncodedDs, dev_data: EncodedDs) -> None: + """ + Fits the linear regression on some data; this refits the model entirely rather than updating it + + :param train_data: Regression is fit on this + :param dev_data: This just gets concatenated to the ``train_data`` + """ self.fit(train_data, dev_data) def __call__(self, ds: EncodedDs, args: PredictionArguments = PredictionArguments()) -> pd.DataFrame: + """ + Make predictions based on datasource with the same features as the ones used for fitting + + :param ds: Predictions are generated from it + :param args: Any additional arguments used in predicting + + :returns: A dataframe containing the decoded predictions and (depending on the args) additional information such as the probabilities for each target class + """ # noqa X = [] for x, _ in ds: X.append(x.tolist()) diff --git a/lightwood/mixer/sktime.py b/lightwood/mixer/sktime.py index 4f2b71772..0fecb3106 100644 --- a/lightwood/mixer/sktime.py +++ b/lightwood/mixer/sktime.py @@ -18,7 +18,7 @@ class SkTime(BaseMixer): supports_proba: bool def __init__( - self, stop_after: int, target: str, dtype_dict: Dict[str, str], + self, stop_after: float, target: str, dtype_dict: Dict[str, str], n_ts_predictions: int, ts_analysis: Dict): super().__init__(stop_after) self.target = target diff --git a/lightwood/mixer/unit.py b/lightwood/mixer/unit.py index fbc17dcc3..cb7e2b1eb 100644 --- a/lightwood/mixer/unit.py +++ b/lightwood/mixer/unit.py @@ -18,7 +18,7 @@ class Unit(BaseMixer): - def __init__(self, stop_after: int, target_encoder: BaseEncoder): + def __init__(self, stop_after: float, target_encoder: BaseEncoder): super().__init__(stop_after) self.target_encoder = target_encoder self.supports_proba = False diff --git a/tests/integration/advanced/test_timeseries.py b/tests/integration/advanced/test_timeseries.py index 6c0c0163c..f3cba25e6 100644 --- a/tests/integration/advanced/test_timeseries.py +++ b/tests/integration/advanced/test_timeseries.py @@ -51,10 +51,10 @@ def calculate_duration(self, predictor, train, time_aim_expected): start = time.process_time() predictor.learn(train) time_aim_actual = (time.process_time() - start) - if((time_aim_expected * 5) < time_aim_actual): + if((time_aim_expected * 10) < time_aim_actual): error = 'time_aim is set to {} seconds, however learning took {}'.format(time_aim_expected, time_aim_actual) raise ValueError(error) - assert (time_aim_expected * 5) >= time_aim_actual + assert (time_aim_expected * 10) >= time_aim_actual return predictor def test_0_time_series_grouped_regression(self): @@ -184,6 +184,7 @@ def test_3_time_series_sktime_mixer(self): df = pd.DataFrame(columns=['Time', target]) df['Time'] = t df[target] = ts + df[f'{target}_2x'] = 2 * ts train = df[:int(len(df) * 0.8)] test = df[int(len(df) * 0.8):] @@ -193,7 +194,8 @@ def test_3_time_series_sktime_mixer(self): 'timeseries_settings': { 'order_by': ['Time'], 'window': 5, - 'nr_predictions': 20 + 'nr_predictions': 20, + 'historical_columns': [f'{target}_2x'] }}) json_ai = json_ai_from_problem(df,
problem_definition=pdef) @@ -209,5 +211,11 @@ predictor.learn(train) ps = predictor.predict(test) - assert r2_score(ps['truth'].values, ps['prediction'].iloc[0]) >= 0.95 + + # test historical columns asserts + test[f'{target}_2x'].iloc[0] = np.nan + self.assertRaises(Exception, predictor.predict, test) + + test.pop(f'{target}_2x') + self.assertRaises(Exception, predictor.predict, test) diff --git a/tests/unit_tests/encoder/date/test_datetime.py b/tests/unit_tests/encoder/date/test_datetime.py index 19cc2532c..6b6422064 100644 --- a/tests/unit_tests/encoder/date/test_datetime.py +++ b/tests/unit_tests/encoder/date/test_datetime.py @@ -2,17 +2,21 @@ from datetime import datetime import numpy as np from dateutil.parser import parse as parse_datetime +import torch from lightwood.encoder.datetime.datetime import DatetimeEncoder from lightwood.encoder.datetime.datetime_sin_normalizer import DatetimeNormalizerEncoder class TestDatetimeEncoder(unittest.TestCase): def test_decode(self): - data = [1555943147, None, 1555943147] + data = [1555943147, None, 1555943147, '', np.nan] enc = DatetimeEncoder() enc.prepare([]) - dec_data = enc.decode(enc.encode(data)) + encoded_repr = enc.encode(data) + assert not torch.isinf(encoded_repr).any() + assert not torch.isnan(encoded_repr).any() + dec_data = enc.decode(encoded_repr) for d in dec_data: assert d in data diff --git a/tests/unit_tests/encoder/numeric/test_numeric.py b/tests/unit_tests/encoder/numeric/test_numeric.py index 16f2caa7f..93590c071 100644 --- a/tests/unit_tests/encoder/numeric/test_numeric.py +++ b/tests/unit_tests/encoder/numeric/test_numeric.py @@ -1,7 +1,17 @@ import unittest import numpy as np +import torch from lightwood.encoder.numeric import NumericEncoder from lightwood.encoder.numeric import TsNumericEncoder +from lightwood.helpers.general import is_none + + +def _pollute(array): + return [ + array + [np.nan], + array + [np.inf], + array + [None] + ] class TestNumericEncoder(unittest.TestCase): @@ -42,7 +52,6 @@ def test_positive_domain(self): def test_log_overflow_and_none(self): data = list(range(-2000, 2000, 66)) - data.extend([None] * 200) encoder = NumericEncoder() encoder.is_target = True @@ -53,3 +62,55 @@ def test_log_overflow_and_none(self): for i in range(0, 70, 10): encoder.decode([[0, pow(2, i), 0]]) + + def test_nan_encoding(self): + # Generate some numbers + data = list(range(-50, 50, 2)) + + # Add invalid values to the data + invalid_data = _pollute(data) + + # Prepare with the correct data and decode invalid data + encoder = NumericEncoder() + encoder.prepare(data) + for array in invalid_data: + # Make sure the encoding has no nans or infs + encoded_repr = encoder.encode(array) + assert not torch.isnan(encoded_repr).any() + assert not torch.isinf(encoded_repr).any() + + # Make sure the invalid value is decoded as `None` and the rest as numbers + decoded_repr = encoder.decode(encoded_repr) + for x in decoded_repr[:-1]: + assert not is_none(x) + assert decoded_repr[-1] is None + + # Prepare with the invalid data and decode the valid data + for array in invalid_data: + encoder = NumericEncoder() + encoder.prepare(array) + + # Make sure the encoding has no nans or infs + encoded_repr = encoder.encode(data) + assert not torch.isnan(encoded_repr).any() + assert not torch.isinf(encoded_repr).any() + + # Make sure all values are decoded as numbers, since this batch contains no invalid entries + decoded_repr = encoder.decode(encoded_repr) + for x in decoded_repr: + assert not is_none(x) + + # Prepare
with the invalid data and decode invalid data + for array in invalid_data: + encoder = NumericEncoder() + encoder.prepare(array) + # Make sure the encoding has no nans or infs + encoded_repr = encoder.encode(array) + assert not torch.isnan(encoded_repr).any() + assert not torch.isinf(encoded_repr).any() + + # Make sure the invalid value is decoded as `None` and the rest as numbers + decoded_repr = encoder.decode(encoded_repr) + for x in decoded_repr[:-1]: + assert not is_none(x) + assert decoded_repr[-1] is None
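As a rough illustration of the three-part numeric target representation that the encoder changes above settle on (sign flag, log-magnitude, mean-scaled value), here is a standalone sketch; the function and argument names are illustrative, not part of lightwood's API:

import math

def encode_target_value(real: float, abs_mean: float, positive_domain: bool = False) -> list:
    # Mirrors the target branch of NumericEncoder.encode() shown in the diff:
    # [sign flag, log of the absolute value (floored at -20), value scaled by the column's absolute mean]
    vector = [0.0] * 3
    vector[0] = 1.0 if real < 0 and not positive_domain else 0.0
    vector[1] = math.log(abs(real)) if abs(real) > 0 else -20.0
    vector[2] = real / abs_mean
    return vector

# With an absolute mean of 10.0, the value -5.0 encodes to roughly [1.0, 1.609, -0.5]
print(encode_target_value(-5.0, abs_mean=10.0))

In the non-target branches shown above, the first component instead acts as a "value present" flag, so missing or invalid values encode to an all-zero vector rather than raising.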
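The change to evaluate_accuracy in lightwood/helpers/general.py gives metrics defined in lightwood.helpers.accuracy (such as the capped r2_score added here) precedence over same-named functions in sklearn.metrics. A minimal sketch of that lookup, assuming lightwood from this branch and scikit-learn are installed; resolve_metric is an illustrative name, not a lightwood function:

import importlib

def resolve_metric(accuracy_function_str: str):
    # Prefer lightwood's own metric if one with this name exists, otherwise fall back to sklearn.metrics
    custom = importlib.import_module('lightwood.helpers.accuracy')
    if hasattr(custom, accuracy_function_str):
        return getattr(custom, accuracy_function_str)
    return getattr(importlib.import_module('sklearn.metrics'), accuracy_function_str)

# 'r2_score' now resolves to the wrapper that clamps negative (or overflowing) scores to 0
metric = resolve_metric('r2_score')
print(metric([3.0, -0.5, 2.0, 7.0], [2.5, 0.0, 2.0, 8.0]))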
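For the new label-weighting path in the LightGBM mixer, per-row weights are looked up from the target encoder's target_weights map after unknown labels have been replaced with the '__mdb_unknown_cat' placeholder, and are then passed to lightgbm.Dataset. A small sketch of that lookup; the weight values and labels below are made up for illustration:

# Hypothetical class-weight map, as a target encoder might expose via `target_weights`
weight_map = {'yes': 1.0, 'no': 2.5, '__mdb_unknown_cat': 1.0}

label_data = ['yes', 'no', 'maybe', 'no']
label_set = {'yes', 'no'}

# Unknown labels are re-mapped first, so the weight map also needs an entry for the placeholder
label_data = [x if x in label_set else '__mdb_unknown_cat' for x in label_data]
weights = [weight_map[x] for x in label_data] if weight_map is not None else None
print(weights)  # [1.0, 2.5, 1.0, 2.5]

# The mixer then forwards these as per-row weights, roughly as:
# lightgbm.Dataset(features, label=encoded_labels, weight=weights)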